commit 951cbf3f65f347c7a7bbcae193218f9187a15fbf
Author:    Khem Raj <raj.khem@gmail.com>  2011-02-23 10:48:51 -0800
Committer: Khem Raj <raj.khem@gmail.com>  2011-02-23 10:48:51 -0800
tree:      96e9c74551e6931804992f8f49af05f732eb0fff
parent:    adbaae2179a6c3746e53f7fbb2ca0939e85a7ea9

bitbake: Remove in-tree version

Bitbake should be used by checking it out from its own repo.

Signed-off-by: Khem Raj <raj.khem@gmail.com>
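
For context, the standalone checkout this commit points to looks roughly like
the following sketch (git://git.openembedded.org hosts both repositories;
cloning bitbake inside the openembedded-core checkout is illustrative, not
mandated by the commit):

    $ git clone git://git.openembedded.org/openembedded-core
    $ cd openembedded-core
    $ git clone git://git.openembedded.org/bitbake
    $ export PATH=$PWD/bitbake/bin:$PATH
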
Diffstat (limited to 'bitbake')
-rw-r--r--  bitbake/AUTHORS                               |   10
-rw-r--r--  bitbake/COPYING                               |  339
-rw-r--r--  bitbake/ChangeLog                             |  317
-rw-r--r--  bitbake/HEADER                                |   19
-rwxr-xr-x  bitbake/bin/bitbake                           |  224
-rwxr-xr-x  bitbake/bin/bitbake-diffsigs                  |   12
-rw-r--r--  bitbake/bin/bitbake-layers                    |  154
-rwxr-xr-x  bitbake/bin/bitbake-runtask                   |  120
-rwxr-xr-x  bitbake/bin/bitdoc                            |  532
-rw-r--r--  bitbake/contrib/README                        |    1
-rw-r--r--  bitbake/contrib/bbdev.sh                      |   31
-rw-r--r--  bitbake/contrib/vim/ftdetect/bitbake.vim      |   24
-rw-r--r--  bitbake/contrib/vim/ftplugin/bitbake.vim      |    1
-rwxr-xr-x  bitbake/contrib/vim/plugin/newbb.vim          |   85
-rw-r--r--  bitbake/contrib/vim/syntax/bitbake.vim        |  123
-rw-r--r--  bitbake/doc/COPYING.GPL                       |  339
-rw-r--r--  bitbake/doc/COPYING.MIT                       |   17
-rw-r--r--  bitbake/doc/bitbake.1                         |  121
-rw-r--r--  bitbake/doc/manual/Makefile                   |   56
-rw-r--r--  bitbake/doc/manual/html.css                   |  281
-rw-r--r--  bitbake/doc/manual/usermanual.xml             |  534
-rw-r--r--  bitbake/lib/bb/COW.py                         |  323
-rw-r--r--  bitbake/lib/bb/__init__.py                    |  139
-rw-r--r--  bitbake/lib/bb/build.py                       |  472
-rw-r--r--  bitbake/lib/bb/cache.py                       |  632
-rw-r--r--  bitbake/lib/bb/codeparser.py                  |  336
-rw-r--r--  bitbake/lib/bb/command.py                     |  271
-rw-r--r--  bitbake/lib/bb/cooker.py                      | 1078
-rw-r--r--  bitbake/lib/bb/daemonize.py                   |  190
-rw-r--r--  bitbake/lib/bb/data.py                        |  338
-rw-r--r--  bitbake/lib/bb/data_smart.py                  |  428
-rw-r--r--  bitbake/lib/bb/event.py                       |  386
-rw-r--r--  bitbake/lib/bb/fetch/__init__.py              |  836
-rw-r--r--  bitbake/lib/bb/fetch/bzr.py                   |  148
-rw-r--r--  bitbake/lib/bb/fetch/cvs.py                   |  172
-rw-r--r--  bitbake/lib/bb/fetch/git.py                   |  339
-rw-r--r--  bitbake/lib/bb/fetch/hg.py                    |  180
-rw-r--r--  bitbake/lib/bb/fetch/local.py                 |   73
-rw-r--r--  bitbake/lib/bb/fetch/osc.py                   |  143
-rw-r--r--  bitbake/lib/bb/fetch/perforce.py              |  206
-rw-r--r--  bitbake/lib/bb/fetch/repo.py                  |   98
-rw-r--r--  bitbake/lib/bb/fetch/ssh.py                   |  118
-rw-r--r--  bitbake/lib/bb/fetch/svk.py                   |  104
-rw-r--r--  bitbake/lib/bb/fetch/svn.py                   |  204
-rw-r--r--  bitbake/lib/bb/fetch/wget.py                  |   93
-rw-r--r--  bitbake/lib/bb/fetch2/__init__.py             | 1074
-rw-r--r--  bitbake/lib/bb/fetch2/bzr.py                  |  141
-rw-r--r--  bitbake/lib/bb/fetch2/cvs.py                  |  181
-rw-r--r--  bitbake/lib/bb/fetch2/git.py                  |  242
-rw-r--r--  bitbake/lib/bb/fetch2/hg.py                   |  174
-rw-r--r--  bitbake/lib/bb/fetch2/local.py                |   80
-rw-r--r--  bitbake/lib/bb/fetch2/osc.py                  |  135
-rw-r--r--  bitbake/lib/bb/fetch2/perforce.py             |  196
-rw-r--r--  bitbake/lib/bb/fetch2/repo.py                 |   98
-rw-r--r--  bitbake/lib/bb/fetch2/ssh.py                  |  120
-rw-r--r--  bitbake/lib/bb/fetch2/svk.py                  |   97
-rw-r--r--  bitbake/lib/bb/fetch2/svn.py                  |  180
-rw-r--r--  bitbake/lib/bb/fetch2/wget.py                 |   91
-rw-r--r--  bitbake/lib/bb/methodpool.py                  |   84
-rw-r--r--  bitbake/lib/bb/msg.py                         |  200
-rw-r--r--  bitbake/lib/bb/parse/__init__.py              |  123
-rw-r--r--  bitbake/lib/bb/parse/ast.py                   |  446
-rw-r--r--  bitbake/lib/bb/parse/parse_py/BBHandler.py    |  254
-rw-r--r--  bitbake/lib/bb/parse/parse_py/ConfHandler.py  |  139
-rw-r--r--  bitbake/lib/bb/parse/parse_py/__init__.py     |   33
-rw-r--r--  bitbake/lib/bb/persist_data.py                |  194
-rw-r--r--  bitbake/lib/bb/process.py                     |  109
-rw-r--r--  bitbake/lib/bb/providers.py                   |  330
-rw-r--r--  bitbake/lib/bb/pysh/__init__.py               |    0
-rw-r--r--  bitbake/lib/bb/pysh/builtin.py                |  710
-rw-r--r--  bitbake/lib/bb/pysh/interp.py                 | 1367
-rw-r--r--  bitbake/lib/bb/pysh/lsprof.py                 |  116
-rw-r--r--  bitbake/lib/bb/pysh/pysh.py                   |  167
-rw-r--r--  bitbake/lib/bb/pysh/pyshlex.py                |  888
-rw-r--r--  bitbake/lib/bb/pysh/pyshyacc.py               |  779
-rw-r--r--  bitbake/lib/bb/pysh/sherrors.py               |   41
-rw-r--r--  bitbake/lib/bb/pysh/subprocess_fix.py         |   77
-rw-r--r--  bitbake/lib/bb/runqueue.py                    | 1663
-rw-r--r--  bitbake/lib/bb/server/__init__.py             |    0
-rw-r--r--  bitbake/lib/bb/server/none.py                 |  195
-rw-r--r--  bitbake/lib/bb/server/xmlrpc.py               |  260
-rw-r--r--  bitbake/lib/bb/shell.py                       |  820
-rw-r--r--  bitbake/lib/bb/siggen.py                      |  298
-rw-r--r--  bitbake/lib/bb/taskdata.py                    |  586
-rw-r--r--  bitbake/lib/bb/ui/__init__.py                 |   17
-rw-r--r--  bitbake/lib/bb/ui/crumbs/__init__.py          |   17
-rw-r--r--  bitbake/lib/bb/ui/crumbs/buildmanager.py      |  455
-rw-r--r--  bitbake/lib/bb/ui/crumbs/progress.py          |   17
-rw-r--r--  bitbake/lib/bb/ui/crumbs/puccho.glade         |  606
-rw-r--r--  bitbake/lib/bb/ui/crumbs/runningbuild.py      |  311
-rw-r--r--  bitbake/lib/bb/ui/depexp.py                   |  307
-rw-r--r--  bitbake/lib/bb/ui/goggle.py                   |  110
-rw-r--r--  bitbake/lib/bb/ui/knotty.py                   |  248
-rw-r--r--  bitbake/lib/bb/ui/ncurses.py                  |  352
-rw-r--r--  bitbake/lib/bb/ui/puccho.py                   |  425
-rw-r--r--  bitbake/lib/bb/ui/uievent.py                  |  127
-rw-r--r--  bitbake/lib/bb/ui/uihelper.py                 |   42
-rw-r--r--  bitbake/lib/bb/utils.py                       |  845
-rw-r--r--  bitbake/lib/codegen.py                        |  570
-rw-r--r--  bitbake/lib/ply/__init__.py                   |    4
-rw-r--r--  bitbake/lib/ply/lex.py                        | 1058
-rw-r--r--  bitbake/lib/ply/yacc.py                       | 3276
-rw-r--r--  bitbake/lib/progressbar.py                    |  384
103 files changed, 0 insertions(+), 32506 deletions(-)
diff --git a/bitbake/AUTHORS b/bitbake/AUTHORS
deleted file mode 100644
index 91fd78fd25..0000000000
--- a/bitbake/AUTHORS
+++ /dev/null
@@ -1,10 +0,0 @@
-Tim Ansell <mithro@mithis.net>
-Phil Blundell <pb@handhelds.org>
-Seb Frankengul <seb@frankengul.org>
-Holger Freyther <holger@moiji-mobile.com>
-Marcin Juszkiewicz <marcin@juszkiewicz.com.pl>
-Chris Larson <kergoth@handhelds.org>
-Ulrich Luckas <luckas@musoft.de>
-Mickey Lauer <mickey@Vanille.de>
-Richard Purdie <rpurdie@rpsys.net>
-Holger Schurig <holgerschurig@gmx.de>
diff --git a/bitbake/COPYING b/bitbake/COPYING
deleted file mode 100644
index d511905c16..0000000000
--- a/bitbake/COPYING
+++ /dev/null
@@ -1,339 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 2, June 1991
-
- Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The licenses for most software are designed to take away your
-freedom to share and change it. By contrast, the GNU General Public
-License is intended to guarantee your freedom to share and change free
-software--to make sure the software is free for all its users. This
-General Public License applies to most of the Free Software
-Foundation's software and to any other program whose authors commit to
-using it. (Some other Free Software Foundation software is covered by
-the GNU Lesser General Public License instead.) You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-this service if you wish), that you receive source code or can get it
-if you want it, that you can change the software or use pieces of it
-in new free programs; and that you know you can do these things.
-
- To protect your rights, we need to make restrictions that forbid
-anyone to deny you these rights or to ask you to surrender the rights.
-These restrictions translate to certain responsibilities for you if you
-distribute copies of the software, or if you modify it.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must give the recipients all the rights that
-you have. You must make sure that they, too, receive or can get the
-source code. And you must show them these terms so they know their
-rights.
-
- We protect your rights with two steps: (1) copyright the software, and
-(2) offer you this license which gives you legal permission to copy,
-distribute and/or modify the software.
-
- Also, for each author's protection and ours, we want to make certain
-that everyone understands that there is no warranty for this free
-software. If the software is modified by someone else and passed on, we
-want its recipients to know that what they have is not the original, so
-that any problems introduced by others will not reflect on the original
-authors' reputations.
-
- Finally, any free program is threatened constantly by software
-patents. We wish to avoid the danger that redistributors of a free
-program will individually obtain patent licenses, in effect making the
-program proprietary. To prevent this, we have made it clear that any
-patent must be licensed for everyone's free use or not licensed at all.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- GNU GENERAL PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. This License applies to any program or other work which contains
-a notice placed by the copyright holder saying it may be distributed
-under the terms of this General Public License. The "Program", below,
-refers to any such program or work, and a "work based on the Program"
-means either the Program or any derivative work under copyright law:
-that is to say, a work containing the Program or a portion of it,
-either verbatim or with modifications and/or translated into another
-language. (Hereinafter, translation is included without limitation in
-the term "modification".) Each licensee is addressed as "you".
-
-Activities other than copying, distribution and modification are not
-covered by this License; they are outside its scope. The act of
-running the Program is not restricted, and the output from the Program
-is covered only if its contents constitute a work based on the
-Program (independent of having been made by running the Program).
-Whether that is true depends on what the Program does.
-
- 1. You may copy and distribute verbatim copies of the Program's
-source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate
-copyright notice and disclaimer of warranty; keep intact all the
-notices that refer to this License and to the absence of any warranty;
-and give any other recipients of the Program a copy of this License
-along with the Program.
-
-You may charge a fee for the physical act of transferring a copy, and
-you may at your option offer warranty protection in exchange for a fee.
-
- 2. You may modify your copy or copies of the Program or any portion
-of it, thus forming a work based on the Program, and copy and
-distribute such modifications or work under the terms of Section 1
-above, provided that you also meet all of these conditions:
-
- a) You must cause the modified files to carry prominent notices
- stating that you changed the files and the date of any change.
-
- b) You must cause any work that you distribute or publish, that in
- whole or in part contains or is derived from the Program or any
- part thereof, to be licensed as a whole at no charge to all third
- parties under the terms of this License.
-
- c) If the modified program normally reads commands interactively
- when run, you must cause it, when started running for such
- interactive use in the most ordinary way, to print or display an
- announcement including an appropriate copyright notice and a
- notice that there is no warranty (or else, saying that you provide
- a warranty) and that users may redistribute the program under
- these conditions, and telling the user how to view a copy of this
- License. (Exception: if the Program itself is interactive but
- does not normally print such an announcement, your work based on
- the Program is not required to print an announcement.)
-
-These requirements apply to the modified work as a whole. If
-identifiable sections of that work are not derived from the Program,
-and can be reasonably considered independent and separate works in
-themselves, then this License, and its terms, do not apply to those
-sections when you distribute them as separate works. But when you
-distribute the same sections as part of a whole which is a work based
-on the Program, the distribution of the whole must be on the terms of
-this License, whose permissions for other licensees extend to the
-entire whole, and thus to each and every part regardless of who wrote it.
-
-Thus, it is not the intent of this section to claim rights or contest
-your rights to work written entirely by you; rather, the intent is to
-exercise the right to control the distribution of derivative or
-collective works based on the Program.
-
-In addition, mere aggregation of another work not based on the Program
-with the Program (or with a work based on the Program) on a volume of
-a storage or distribution medium does not bring the other work under
-the scope of this License.
-
- 3. You may copy and distribute the Program (or a work based on it,
-under Section 2) in object code or executable form under the terms of
-Sections 1 and 2 above provided that you also do one of the following:
-
- a) Accompany it with the complete corresponding machine-readable
- source code, which must be distributed under the terms of Sections
- 1 and 2 above on a medium customarily used for software interchange; or,
-
- b) Accompany it with a written offer, valid for at least three
- years, to give any third party, for a charge no more than your
- cost of physically performing source distribution, a complete
- machine-readable copy of the corresponding source code, to be
- distributed under the terms of Sections 1 and 2 above on a medium
- customarily used for software interchange; or,
-
- c) Accompany it with the information you received as to the offer
- to distribute corresponding source code. (This alternative is
- allowed only for noncommercial distribution and only if you
- received the program in object code or executable form with such
- an offer, in accord with Subsection b above.)
-
-The source code for a work means the preferred form of the work for
-making modifications to it. For an executable work, complete source
-code means all the source code for all modules it contains, plus any
-associated interface definition files, plus the scripts used to
-control compilation and installation of the executable. However, as a
-special exception, the source code distributed need not include
-anything that is normally distributed (in either source or binary
-form) with the major components (compiler, kernel, and so on) of the
-operating system on which the executable runs, unless that component
-itself accompanies the executable.
-
-If distribution of executable or object code is made by offering
-access to copy from a designated place, then offering equivalent
-access to copy the source code from the same place counts as
-distribution of the source code, even though third parties are not
-compelled to copy the source along with the object code.
-
- 4. You may not copy, modify, sublicense, or distribute the Program
-except as expressly provided under this License. Any attempt
-otherwise to copy, modify, sublicense or distribute the Program is
-void, and will automatically terminate your rights under this License.
-However, parties who have received copies, or rights, from you under
-this License will not have their licenses terminated so long as such
-parties remain in full compliance.
-
- 5. You are not required to accept this License, since you have not
-signed it. However, nothing else grants you permission to modify or
-distribute the Program or its derivative works. These actions are
-prohibited by law if you do not accept this License. Therefore, by
-modifying or distributing the Program (or any work based on the
-Program), you indicate your acceptance of this License to do so, and
-all its terms and conditions for copying, distributing or modifying
-the Program or works based on it.
-
- 6. Each time you redistribute the Program (or any work based on the
-Program), the recipient automatically receives a license from the
-original licensor to copy, distribute or modify the Program subject to
-these terms and conditions. You may not impose any further
-restrictions on the recipients' exercise of the rights granted herein.
-You are not responsible for enforcing compliance by third parties to
-this License.
-
- 7. If, as a consequence of a court judgment or allegation of patent
-infringement or for any other reason (not limited to patent issues),
-conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot
-distribute so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you
-may not distribute the Program at all. For example, if a patent
-license would not permit royalty-free redistribution of the Program by
-all those who receive copies directly or indirectly through you, then
-the only way you could satisfy both it and this License would be to
-refrain entirely from distribution of the Program.
-
-If any portion of this section is held invalid or unenforceable under
-any particular circumstance, the balance of the section is intended to
-apply and the section as a whole is intended to apply in other
-circumstances.
-
-It is not the purpose of this section to induce you to infringe any
-patents or other property right claims or to contest validity of any
-such claims; this section has the sole purpose of protecting the
-integrity of the free software distribution system, which is
-implemented by public license practices. Many people have made
-generous contributions to the wide range of software distributed
-through that system in reliance on consistent application of that
-system; it is up to the author/donor to decide if he or she is willing
-to distribute software through any other system and a licensee cannot
-impose that choice.
-
-This section is intended to make thoroughly clear what is believed to
-be a consequence of the rest of this License.
-
- 8. If the distribution and/or use of the Program is restricted in
-certain countries either by patents or by copyrighted interfaces, the
-original copyright holder who places the Program under this License
-may add an explicit geographical distribution limitation excluding
-those countries, so that distribution is permitted only in or among
-countries not thus excluded. In such case, this License incorporates
-the limitation as if written in the body of this License.
-
- 9. The Free Software Foundation may publish revised and/or new versions
-of the General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
-Each version is given a distinguishing version number. If the Program
-specifies a version number of this License which applies to it and "any
-later version", you have the option of following the terms and conditions
-either of that version or of any later version published by the Free
-Software Foundation. If the Program does not specify a version number of
-this License, you may choose any version ever published by the Free Software
-Foundation.
-
- 10. If you wish to incorporate parts of the Program into other free
-programs whose distribution conditions are different, write to the author
-to ask for permission. For software which is copyrighted by the Free
-Software Foundation, write to the Free Software Foundation; we sometimes
-make exceptions for this. Our decision will be guided by the two goals
-of preserving the free status of all derivatives of our free software and
-of promoting the sharing and reuse of software generally.
-
- NO WARRANTY
-
- 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
-FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
-OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
-PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
-TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
-PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
-REPAIR OR CORRECTION.
-
- 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
-REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
-INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
-OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
-TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
-YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
-PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGES.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-convey the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License along
- with this program; if not, write to the Free Software Foundation, Inc.,
- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program is interactive, make it output a short notice like this
-when it starts in an interactive mode:
-
- Gnomovision version 69, Copyright (C) year name of author
- Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, the commands you use may
-be called something other than `show w' and `show c'; they could even be
-mouse-clicks or menu items--whatever suits your program.
-
-You should also get your employer (if you work as a programmer) or your
-school, if any, to sign a "copyright disclaimer" for the program, if
-necessary. Here is a sample; alter the names:
-
- Yoyodyne, Inc., hereby disclaims all copyright interest in the program
- `Gnomovision' (which makes passes at compilers) written by James Hacker.
-
- <signature of Ty Coon>, 1 April 1989
- Ty Coon, President of Vice
-
-This General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may
-consider it more useful to permit linking proprietary applications with the
-library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License.
diff --git a/bitbake/ChangeLog b/bitbake/ChangeLog
deleted file mode 100644
index 4ac2a64462..0000000000
--- a/bitbake/ChangeLog
+++ /dev/null
@@ -1,317 +0,0 @@
-Changes in Bitbake 1.9.x:
- - Add PE (Package Epoch) support from Philipp Zabel (pH5)
- - Treat python functions the same as shell functions for logging
- - Use TMPDIR/anonfunc as a __anonfunc temp directory (T)
- - Catch truncated cache file errors
- - Allow operations other than assignment on flag variables
- - Add code to handle inter-task dependencies
- - Fix cache errors when generating dotGraphs
- - Make sure __inherit_cache is updated before calling include() (from Michael Krelin)
- - Fix bug when target was in ASSUME_PROVIDED (#2236)
- - Raise ParseError for filenames with multiple underscores instead of infinitely looping (#2062)
- - Fix invalid regexp in BBMASK error handling (missing import) (#1124)
- - Promote certain warnings from debug to note 2 level
- - Update manual
- - Correctly redirect stdin when forking
- - If parsing errors are found, exit; too many users miss the errors
- - Remove spurious PREFERRED_PROVIDER warnings
- - svn fetcher: Add _buildsvncommand function
- - Improve certain error messages
- - Rewrite svn fetcher to make adding extra operations easier
- as part of future SRCDATE="now" fixes
- (requires new FETCHCMD_svn definition in bitbake.conf)
- - Change SVNDIR layout to be more unique (fixes #2644 and #2624)
- - Add ConfigParsed Event after configuration parsing is complete
- - Add SRCREV support for svn fetcher
- - data.emit_var() - only call getVar if we need the variable
- - Stop generating the A variable (seems to be legacy code)
- - Make sure intertask depends get processed correctly in recursive depends
- - Add pn-PN to overrides when evaluating PREFERRED_VERSION
- - Improve the progress indicator by skipping tasks that have
- already run before starting the build rather than during it
- - Add profiling option (-P)
- - Add BB_SRCREV_POLICY variable (clear or cache) to control SRCREV cache
- - Add SRCREV_FORMAT support
- - Fix local fetcher's localpath return values
- - Apply OVERRIDES before performing immediate expansions
- - Allow the -b -e option combination to take regular expressions
- - Fix handling of variables with expansion in the name using _append/_prepend
- e.g. RRECOMMENDS_${PN}_append_xyz = "abc"
- - Add plain message function to bb.msg
- - Sort the list of providers before processing so dependency problems are
- reproducible rather than effectively random
- - Fix/improve bitbake -s output
- - Add locking for fetchers so only one tries to fetch a given file at a given time
- - Fix int(0)/None confusion in runqueue.py which causes random gaps in dependency chains
- - Expand data in addtasks
- - Print the list of missing DEPENDS,RDEPENDS for the "No buildable providers available for required...."
- error message.
- - Rework add_task to be more efficient (6% speedup, 7% number of function calls reduction)
- - Sort digraph output to make builds more reproducible
- - Split expandKeys into two for loops to benefit from the expand_cache (12% speedup)
- - runqueue.py: Fix idepends handling to avoid dependency errors
- - Clear the terminal TOSTOP flag if set (and warn the user)
- - Fix regression from r653 and make SRCDATE/CVSDATE work for packages again
- - Fix a bug in bb.decodeurl where http://some.where.com/somefile.tgz decoded to host="" (#1530)
- - Warn about malformed PREFERRED_PROVIDERS (#1072)
- - Add support for BB_NICE_LEVEL option (#1627)
- - Psyco is used only on x86 as there is no support for other architectures.
- - Sort initial providers list by default preference (#1145, #2024)
- - Improve provider sorting so preferred versions have preference over latest versions (#768)
- - Detect builds of tasks with overlapping providers and warn (will become a fatal error) (#1359)
- - Add MULTI_PROVIDER_WHITELIST variable to allow known safe multiple providers to be listed
- - Handle paths in svn fetcher module parameter
- - Support the syntax "export VARIABLE"
- - Add bzr fetcher
- - Add support for cleaning directories before a task in the form:
- do_taskname[cleandirs] = "dir"
- - bzr fetcher tweaks from Robert Schuster (#2913)
- - Add mercurial (hg) fetcher from Robert Schuster (#2913)
- - Don't add duplicates to BBPATH
- - Fix preferred_version return values (providers.py)
- - Fix 'depends' flag splitting
- - Fix unexport handling (#3135)
- - Add bb.copyfile function similar to bb.movefile (and improve movefile error reporting)
- - Allow multiple options for deptask flag
- - Use git-fetch instead of git-pull removing any need for merges when
- fetching (we don't care about the index). Fixes fetch errors.
- - Add BB_GENERATE_MIRROR_TARBALLS option, set to 0 to make git fetches
- faster at the expense of not creating mirror tarballs.
- - SRCREV handling updates, improvements and fixes from Poky
- - Add bb.utils.lockfile() and bb.utils.unlockfile() from Poky
- - Add support for task selfstamp and lockfiles flags
- - Disable task number acceleration since it can allow the tasks to run
- out of sequence
- - Improve runqueue code comments
- - Add task scheduler abstraction and some example schedulers
- - Improve circular dependency chain debugging code and user feedback
- - Don't give a stacktrace for invalid tasks, have a user friendly message (#3431)
- - Add support for "-e target" (#3432)
- - Fix shell showdata command (#3259)
- - Fix shell data updating problems (#1880)
- - Properly raise errors for invalid source URI protocols
- - Change the wget fetcher failure handling to avoid lockfile problems
- - Add support for branches in git fetcher (Otavio Salvador, Michael Lauer)
- - Make taskdata and runqueue errors more user friendly
- - Add norecurse and fullpath options to cvs fetcher
- - Fix exit code for build failures in --continue mode
- - Fix git branch tags fetching
- - Change parseConfigurationFile so it works on real data, not a copy
- - Handle 'base' inherit and all other INHERITs from parseConfigurationFile
- instead of BBHandler
- - Fix getVarFlags bug in data_smart
- - Optimise cache handling by more quickly detecting an invalid cache, only
- saving the cache when it's changed, moving the cache validity check into
- the parsing loop and factoring some getVar calls outside a for loop
- - Cooker: Remove a debug message from the parsing loop to lower overhead
- - Convert build.py exec_task to use getVarFlags
- - Update shell to use cooker.buildFile
- - Add StampUpdate event
- - Convert -b option to use taskdata/runqueue
- - Remove digraph and switch to new stamp checking code. exec_task no longer
- honours dependencies
- - Make fetcher timestamp updating non-fatal when permissions don't allow
- updates
- - Add BB_SCHEDULER variable/option ("completion" or "speed") controlling
- the way bitbake schedules tasks
- - Add BB_STAMP_POLICY variable/option ("perfile" or "full") controlling
- how extensively stamps are looked at for validity
- - When handling build target failures make sure idepends are checked and
- failed where needed. Fixes --continue mode crashes.
- - Fix -f (force) in conjunction with -b
- - Fix problems with recrdeptask handling where some idepends weren't handled
- correctly.
- - Handle exit codes correctly (from pH5)
- - Work around refs/HEAD issues with git over http (#3410)
- - Add proxy support to the CVS fetcher (from Cyril Chemparathy)
- - Improve runfetchcmd so errors are seen and various GIT variables are exported
- - Add ability to fetchers to check URL validity without downloading
- - Improve runtime PREFERRED_PROVIDERS warning message
- - Add BB_STAMP_WHITELIST option which contains a list of stamps to ignore when
- checking stamp dependencies and using a BB_STAMP_POLICY of "whitelist"
- - No longer weight providers on the basis of a package being "already staged". This
- leads to builds being non-deterministic.
- - Flush stdout/stderr before forking to fix duplicate console output
- - Make sure recrdeps tasks include all inter-task dependencies of a given fn
- - Add bb.runqueue.check_stamp_fn() for use by packaged-staging
- - Add PERSISTENT_DIR to store the PersistData in a persistent
- directory != the cache dir.
- - Add md5 and sha256 checksum generation functions to utils.py
- - Correctly handle '-' characters in class names (#2958)
- - Make sure expandKeys has been called on the data dictionary before running tasks
- - Correctly add a task override in the form task-TASKNAME.
- - Revert the '-' character fix in class names since it breaks things
- - When a regexp fails to compile for PACKAGES_DYNAMIC, print a more useful error (#4444)
- - Allow to checkout CVS by Date and Time. Just add HHmm to the SRCDATE.
- - Move prunedir function to utils.py and add explode_dep_versions function
- - Raise an exception if SRCREV == 'INVALID'
- - Fix hg fetcher username/password handling and fix crash
- - Fix PACKAGES_DYNAMIC handling of packages with '++' in the name
- - Rename __depends to __base_depends after configuration parsing so we don't
- recheck the validity of the config files time after time
- - Add better environmental variable handling. By default it will now only pass certain
- whitelisted variables into the data store. If BB_PRESERVE_ENV is set bitbake will use
- all variable from the environment. If BB_ENV_WHITELIST is set, that whitelist will be
- used instead of the internal bitbake one. Alternatively, BB_ENV_EXTRAWHITE can be used
- to extend the internal whitelist.
- - Perforce fetcher fix to use commandline options instead of being overridden by the environment
- - bb.utils.prunedir can cope with symlinks to directories without exceptions
- - use @rev when doing a svn checkout
- - Add osc fetcher (from Joshua Lock in Poky)
- - When SRCREV autorevisioning for a recipe is in use, don't cache the recipe
- - Add tryaltconfigs option to control whether bitbake tries using alternative providers
- to fulfil failed dependencies. It defaults to off, changing the default since this
- behaviour confuses many users and isn't often useful.
- - Improve lock file function error handling
- - Add username handling to the git fetcher (Robert Bragg)
- - Add support for HTTP_PROXY and HTTP_PROXY_IGNORE variables to the wget fetcher
- - Export more variables to the fetcher commands to allow ssh checkouts and checkouts through
- proxies to work better. (from Poky)
- - Also allow user and pswd options in SRC_URIs globally (from Poky)
- - Improve proxy handling when using mirrors (from Poky)
- - Add bb.utils.prune_suffix function
- - Fix hg checkouts of specific revisions (from Poky)
- - Fix wget fetching of urls with parameters specified (from Poky)
- - Add username handling to git fetcher (from Poky)
- - Set HOME environmental variable when running fetcher commands (from Poky)
- - Make sure allowed variables inherited from the environment are exported again (from Poky)
- - When running a stage task in bbshell, run populate_staging, not the stage task (from Poky)
- - Fix + character escaping from PACKAGES_DYNAMIC (thanks Otavio Salvador)
- - Addition of BBCLASSEXTEND support for allowing one recipe to provide multiple targets (from Poky)
-
-Changes in Bitbake 1.8.0:
- - Release 1.7.x as a stable series
-
-Changes in BitBake 1.7.x:
- - Major updates of the dependency handling and execution
- of tasks. Code from bin/bitbake replaced with runqueue.py
- and taskdata.py
- - New task execution code supports multithreading with a simplistic
- threading algorithm controlled by BB_NUMBER_THREADS
- - Change of the SVN Fetcher to keep the checkout around
- courtesy of Paul Sokolovsky (#1367)
- - PATH fix to bbimage (#1108)
- - Allow debug domains to be specified on the commandline (-l)
- - Allow 'interactive' tasks
- - Logging message improvements
- - Drop now unneeded BUILD_ALL_DEPS variable
- - Add support for wildcards to -b option
- - Major overhaul of the fetchers making a large amount of code common
- including mirroring code
- - Fetchers now touch md5 stamps upon access (to show activity)
- - Fix -f force option when used without -b (long standing bug)
- - Add expand_cache to data_cache.py, caching expanded data (speedup)
- - Allow version field in DEPENDS (ignored for now)
- - Add abort flag support to the shell
- - Make inherit fail if the class doesn't exist (#1478)
- - Fix data.emit_env() to expand keynames as well as values
- - Add ssh fetcher
- - Add perforce fetcher
- - Make PREFERRED_PROVIDER_foobar default to foobar if available
- - Share the parser's mtime_cache, reducing the number of stat syscalls
- - Compile all anonfuncs at once!
- *** Anonfuncs must now use common spacing format ***
- - Memorise the list of handlers in __BBHANDLERS and tasks in __BBTASKS
- This removes 2 million function calls resulting in a 5-10% speedup
- - Add manpage
- - Update generateDotGraph to use taskData/runQueue improving accuracy
- and also adding a task dependency graph
- - Fix/standardise on GPLv2 licence
- - Move most functionality from bin/bitbake to cooker.py and split into
- separate functions
- - CVS fetcher: Added support for non-default port
- - Add BBINCLUDELOGS_LINES, the number of lines to read from any logfile
- - Drop shebangs from lib/bb scripts
-
-Changes in Bitbake 1.6.0:
- - Better msg handling
- - COW dict implementation from Tim Ansell (mithro) leading
- to better performance
- - Speed up of -s
-
-Changes in Bitbake 1.4.4:
- - SRCDATE handling now works, courtesy of Justin Patrin
- - #1017 fix to work with rm_work
-
-Changes in BitBake 1.4.2:
- - Send logs to oe.pastebin.com instead of pastebin.com
- fixes #856
- - Copy the internal bitbake data before building the
- dependency graph. This fixes nano not having a
- virtual/libc dependency
- - Allow multiple TARBALL_STASH entries
- - Cache, check if the directory exists before changing
- into it
- - git speedup cloning by not doing a checkout
- - allow to have spaces in filenames (.conf, .bb, .bbclass)
-
-Changes in BitBake 1.4.0:
- - Fix to check both RDEPENDS and RDEPENDS_${PN}
- - Fix a RDEPENDS parsing bug in utils:explode_deps()
- - Update git fetcher behaviour to match git changes
- - ASSUME_PROVIDED allowed to include runtime packages
- - git fetcher cleanup and efficiency improvements
- - Change the format of the cache
- - Update usermanual to document the Fetchers
- - Major changes to caching with a new strategy
- giving a major performance increase when reparsing
- with few data changes
-
-Changes in BitBake 1.3.3:
- - Create a new Fetcher module to ease the
- development of new Fetchers.
- Issue #438 fixed by rpurdie@openedhand.com
- - Make the Subversion fetcher honor the SRC Date
- (CVSDATE).
- Issue #555 fixed by chris@openedhand.com
- - Expand PREFERRED_PROVIDER properly
- Issue #436 fixed by rpurdie@openedhand.com
- - Typo fix for Issue #531 by Philipp Zabel for the
- BitBake Shell
- - Introduce a new special variable SRCDATE as
- a generic naming to replace CVSDATE.
- - Introduce a new keyword 'required'. In contrast
- to 'include' parsing will fail if a to be included
- file can not be found.
- - Remove hardcoding of the STAMP directory. Patch
- courtesy of Philipp Zabel
- - Track the RDEPENDS of each package (rpurdie@openedhand.com)
- - Introduce BUILD_ALL_DEPS to build all RDEPENDS. E.g
- this is used by the OpenEmbedded Meta Packages.
- (rpurdie@openedhand.com).
-
-Changes in BitBake 1.3.2:
- - reintegration of make.py into BitBake
- - bbread is gone, use bitbake -e
- - lots of shell updates and bugfixes
- - Introduction of the .= and =. operator
- - Sort variables, keys and groups in bitdoc
- - Fix regression in the handling of BBCOLLECTIONS
- - Update the bitbake usermanual
-
-Changes in BitBake 1.3.0:
- - add bitbake interactive shell (bitbake -i)
- - refactor bitbake utility in OO style
- - kill default arguments in methods in the bb.data module
- - kill default arguments in methods in the bb.fetch module
- - the http/https/ftp fetcher will fail if the to be
- downloaded file was not found in DL_DIR (this is needed
- to avoid unpacking the sourceforge mirror page)
- - Switch to a cow like data instance for persistent and non
- persisting mode (called data_smart.py)
- - Changed the callback of bb.make.collect_bbfiles to carry
- additional parameters
- - Drastically reduced the amount of needed RAM by not holding
- each data instance in memory when using a cache/persistent
- storage
-
-Changes in BitBake 1.2.1:
- The 1.2.1 release is meant as an intermediate release to lay the
- ground for more radical changes. The most notable changes are:
-
- - Do not hardcode {}, use bb.data.init() instead if you want to
- get an instance of a data class
- - bb.data.init() is a factory and the old bb.data methods are delegates
- - Do not use deepcopy; use bb.data.createCopy() instead.
- - Removed default arguments in bb.fetch
-
diff --git a/bitbake/HEADER b/bitbake/HEADER
deleted file mode 100644
index 9859255df3..0000000000
--- a/bitbake/HEADER
+++ /dev/null
@@ -1,19 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# <one line to give the program's name and a brief idea of what it does.>
-# Copyright (C) <year> <name of author>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
diff --git a/bitbake/bin/bitbake b/bitbake/bin/bitbake
deleted file mode 100755
index 6d0528953c..0000000000
--- a/bitbake/bin/bitbake
+++ /dev/null
@@ -1,224 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys, logging
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(__file__)),
- 'lib'))
-
-import optparse
-import warnings
-from traceback import format_exception
-try:
- import bb
-except RuntimeError, exc:
- sys.exit(str(exc))
-from bb import event
-import bb.msg
-from bb import cooker
-from bb import ui
-from bb import server
-from bb.server import none
-#from bb.server import xmlrpc
-
-__version__ = "1.11.0"
-logger = logging.getLogger("BitBake")
-
-
-class BBConfiguration(object):
- """
- Manages build options and configurations for one run
- """
-
- def __init__(self, options):
- for key, val in options.__dict__.items():
- setattr(self, key, val)
- self.pkgs_to_build = []
-
-
-def get_ui(config):
- if config.ui:
- interface = config.ui
- else:
- interface = 'knotty'
-
- try:
- # Dynamically load the UI based on the ui name. Although we
- # suggest a fixed set this allows you to have flexibility in which
- # ones are available.
- module = __import__("bb.ui", fromlist = [interface])
- return getattr(module, interface).main
- except AttributeError:
- sys.exit("FATAL: Invalid user interface '%s' specified.\n"
- "Valid interfaces: depexp, goggle, ncurses, knotty [default]." % interface)
-
-
-# Display bitbake/OE warnings via the BitBake.Warnings logger, ignoring others
-warnlog = logging.getLogger("BitBake.Warnings")
-_warnings_showwarning = warnings.showwarning
-def _showwarning(message, category, filename, lineno, file=None, line=None):
- if file is not None:
- if _warnings_showwarning is not None:
- _warnings_showwarning(message, category, filename, lineno, file, line)
- else:
- s = warnings.formatwarning(message, category, filename, lineno)
- warnlog.warn(s)
-
-warnings.showwarning = _showwarning
-warnings.filterwarnings("ignore")
-warnings.filterwarnings("default", module="(<string>$|(oe|bb)\.)")
-warnings.filterwarnings("ignore", category=PendingDeprecationWarning)
-warnings.filterwarnings("ignore", category=ImportWarning)
-warnings.filterwarnings("ignore", category=DeprecationWarning, module="<string>$")
-warnings.filterwarnings("ignore", message="With-statements now directly support multiple context managers")
-
-
-def main():
- parser = optparse.OptionParser(
- version = "BitBake Build Tool Core version %s, %%prog version %s" % (bb.__version__, __version__),
- usage = """%prog [options] [package ...]
-
-Executes the specified task (default is 'build') for a given set of BitBake files.
-It expects that BBFILES is defined, which is a space separated list of files to
-be executed. BBFILES does support wildcards.
-Default BBFILES are the .bb files in the current directory.""")
-
- parser.add_option("-b", "--buildfile", help = "execute the task against this .bb file, rather than a package from BBFILES.",
- action = "store", dest = "buildfile", default = None)
-
- parser.add_option("-k", "--continue", help = "continue as much as possible after an error. While the target that failed, and those that depend on it, cannot be remade, the other dependencies of these targets can be processed all the same.",
- action = "store_false", dest = "abort", default = True)
-
- parser.add_option("-a", "--tryaltconfigs", help = "continue with builds by trying to use alternative providers where possible.",
- action = "store_true", dest = "tryaltconfigs", default = False)
-
- parser.add_option("-f", "--force", help = "force run of specified cmd, regardless of stamp status",
- action = "store_true", dest = "force", default = False)
-
- parser.add_option("-c", "--cmd", help = "Specify task to execute. Note that this only executes the specified task for the providee and the packages it depends on, i.e. 'compile' does not implicitly call stage for the dependencies (IOW: use only if you know what you are doing). Depending on the base.bbclass a listtasks tasks is defined and will show available tasks",
- action = "store", dest = "cmd")
-
- parser.add_option("-r", "--read", help = "read the specified file before bitbake.conf",
- action = "append", dest = "file", default = [])
-
- parser.add_option("-v", "--verbose", help = "output more chit-chat to the terminal",
- action = "store_true", dest = "verbose", default = False)
-
- parser.add_option("-D", "--debug", help = "Increase the debug level. You can specify this more than once.",
- action = "count", dest="debug", default = 0)
-
- parser.add_option("-n", "--dry-run", help = "don't execute, just go through the motions",
- action = "store_true", dest = "dry_run", default = False)
-
- parser.add_option("-S", "--dump-signatures", help = "don't execute, just dump out the signature construction information",
- action = "store_true", dest = "dump_signatures", default = False)
-
- parser.add_option("-p", "--parse-only", help = "quit after parsing the BB files (developers only)",
- action = "store_true", dest = "parse_only", default = False)
-
- parser.add_option("-d", "--disable-psyco", help = "disable using the psyco just-in-time compiler (not recommended)",
- action = "store_true", dest = "disable_psyco", default = False)
-
- parser.add_option("-s", "--show-versions", help = "show current and preferred versions of all packages",
- action = "store_true", dest = "show_versions", default = False)
-
- parser.add_option("-e", "--environment", help = "show the global or per-package environment (this is what used to be bbread)",
- action = "store_true", dest = "show_environment", default = False)
-
- parser.add_option("-g", "--graphviz", help = "emit the dependency trees of the specified packages in the dot syntax",
- action = "store_true", dest = "dot_graph", default = False)
-
- parser.add_option("-I", "--ignore-deps", help = """Assume these dependencies don't exist and are already provided (equivalent to ASSUME_PROVIDED). Useful to make dependency graphs more appealing""",
- action = "append", dest = "extra_assume_provided", default = [])
-
- parser.add_option("-l", "--log-domains", help = """Show debug logging for the specified logging domains""",
- action = "append", dest = "debug_domains", default = [])
-
- parser.add_option("-P", "--profile", help = "profile the command and print a report",
- action = "store_true", dest = "profile", default = False)
-
- parser.add_option("-u", "--ui", help = "userinterface to use",
- action = "store", dest = "ui")
-
- parser.add_option("", "--revisions-changed", help = "Set the exit code depending on whether upstream floating revisions have changed or not",
- action = "store_true", dest = "revisions_changed", default = False)
-
- options, args = parser.parse_args(sys.argv)
-
- configuration = BBConfiguration(options)
- configuration.pkgs_to_build.extend(args[1:])
- configuration.initial_path = os.environ['PATH']
-
- ui_main = get_ui(configuration)
-
- loghandler = event.LogHandler()
- logger.addHandler(loghandler)
-
- #server = bb.server.xmlrpc
- server = bb.server.none
-
- # Save a logfile for cooker into the current working directory. When the
- # server is daemonized this logfile will be truncated.
- cooker_logfile = os.path.join(os.getcwd(), "cooker.log")
-
- bb.utils.init_logger(bb.msg, configuration.verbose, configuration.debug,
- configuration.debug_domains)
-
- # Clear away any spurious environment variables. But don't wipe the
- # environment totally. This is necessary to ensure the correct operation
- # of the UIs (e.g. for DISPLAY, etc.)
- bb.utils.clean_environment()
-
- cooker = bb.cooker.BBCooker(configuration, server)
- cooker.parseCommandLine()
-
- serverinfo = server.BitbakeServerInfo(cooker.server)
-
- server.BitBakeServerFork(cooker, cooker.server, serverinfo, cooker_logfile)
- del cooker
-
- logger.removeHandler(loghandler)
-
- # Setup a connection to the server (cooker)
- server_connection = server.BitBakeServerConnection(serverinfo)
-
- # Launch the UI
- if configuration.ui:
- ui = configuration.ui
- else:
- ui = "knotty"
-
- try:
- return server.BitbakeUILauch().launch(serverinfo, ui_main, server_connection.connection, server_connection.events)
- finally:
- server_connection.terminate()
-
-if __name__ == "__main__":
- try:
- ret = main()
- except Exception:
- ret = 1
- import traceback
- traceback.print_exc(5)
- sys.exit(ret)
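
The option parser above gives a feel for how the removed wrapper was driven;
a few illustrative invocations follow (the recipe and target names are
hypothetical, but every flag shown is defined in the parser above):

    $ bitbake -b foo_1.0.bb -c compile -f    # force one task against a single .bb file
    $ bitbake -k sometarget                  # continue past errors where possible
    $ bitbake -u ncurses sometarget          # select an alternative user interface
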
diff --git a/bitbake/bin/bitbake-diffsigs b/bitbake/bin/bitbake-diffsigs
deleted file mode 100755
index 5eb77ce59d..0000000000
--- a/bitbake/bin/bitbake-diffsigs
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python
-import os
-import sys
-import warnings
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
-
-import bb.siggen
-
-if len(sys.argv) > 2:
- bb.siggen.compare_sigfiles(sys.argv[1], sys.argv[2])
-else:
- bb.siggen.dump_sigfile(sys.argv[1])
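
As the branch at the end of the script shows, bitbake-diffsigs compared two
signature files when given two arguments and dumped a single one otherwise;
illustrative usage (the file names are hypothetical):

    $ bitbake-diffsigs old/do_compile.sigdata new/do_compile.sigdata   # compare two sigfiles
    $ bitbake-diffsigs old/do_compile.sigdata                          # dump one sigfile
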
diff --git a/bitbake/bin/bitbake-layers b/bitbake/bin/bitbake-layers
deleted file mode 100644
index ed11e5a386..0000000000
--- a/bitbake/bin/bitbake-layers
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/usr/bin/env python2.6
-
-import cmd
-import logging
-import os.path
-import sys
-
-bindir = os.path.dirname(__file__)
-topdir = os.path.dirname(bindir)
-sys.path[0:0] = [os.path.join(topdir, 'lib')]
-
-import bb.cache
-import bb.cooker
-import bb.providers
-from bb.cooker import state
-
-
-logger = logging.getLogger('BitBake')
-default_cmd = 'show_appends'
-
-
-def main(args):
- logging.basicConfig(format='%(levelname)s: %(message)s')
- bb.utils.clean_environment()
-
- cmds = Commands()
- if args:
- cmds.onecmd(' '.join(args))
- else:
- cmds.onecmd(default_cmd)
- return cmds.returncode
-
-
-class Commands(cmd.Cmd):
- def __init__(self):
- cmd.Cmd.__init__(self)
-
- self.returncode = 0
- self.config = Config(parse_only=True)
- self.cooker = bb.cooker.BBCooker(self.config,
- self.register_idle_function)
- self.config_data = self.cooker.configuration.data
- bb.providers.logger.setLevel(logging.ERROR)
- self.prepare_cooker()
-
- def register_idle_function(self, function, data):
- pass
-
- def prepare_cooker(self):
- sys.stderr.write("Parsing recipes..")
- logger.setLevel(logging.ERROR)
-
- try:
- while self.cooker.state in (state.initial, state.parsing):
- self.cooker.updateCache()
- except KeyboardInterrupt:
- self.cooker.shutdown()
- self.cooker.updateCache()
- sys.exit(2)
-
- logger.setLevel(logging.INFO)
- sys.stderr.write("done.\n")
-
- self.cooker_data = self.cooker.status
- self.cooker_data.appends = self.cooker.appendlist
-
- def do_show_layers(self, args):
- logger.info(str(self.config_data.getVar('BBLAYERS', True)))
-
- def do_show_appends(self, args):
- if not self.cooker_data.appends:
- logger.info('No append files found')
- return
-
- logger.info('State of append files:')
-
- for pn in self.cooker_data.pkg_pn:
- self.show_appends_for_pn(pn)
-
- self.show_appends_with_no_recipes()
-
- def show_appends_for_pn(self, pn):
- filenames = self.cooker_data.pkg_pn[pn]
-
- best = bb.providers.findBestProvider(pn,
- self.cooker.configuration.data,
- self.cooker_data,
- self.cooker_data.pkg_pn)
- best_filename = os.path.basename(best[3])
-
- appended, missing = self.get_appends_for_files(filenames)
- if appended:
- for basename, appends in appended:
- logger.info('%s:', basename)
- for append in appends:
- logger.info(' %s', append)
-
- if best_filename in missing:
- logger.warn('%s: missing append for preferred version',
- best_filename)
- self.returncode |= 1
-
- def get_appends_for_files(self, filenames):
- appended, notappended = set(), set()
- for filename in filenames:
- _, cls = bb.cache.Cache.virtualfn2realfn(filename)
- if cls:
- continue
-
- basename = os.path.basename(filename)
- appends = self.cooker_data.appends.get(basename)
- if appends:
- appended.add((basename, frozenset(appends)))
- else:
- notappended.add(basename)
- return appended, notappended
-
- def show_appends_with_no_recipes(self):
- recipes = set(os.path.basename(f)
- for f in self.cooker_data.pkg_fn.iterkeys())
- appended_recipes = self.cooker_data.appends.iterkeys()
- appends_without_recipes = [self.cooker_data.appends[recipe]
- for recipe in appended_recipes
- if recipe not in recipes]
- if appends_without_recipes:
- appendlines = (' %s' % append
- for appends in appends_without_recipes
- for append in appends)
- logger.warn('No recipes available for:\n%s',
- '\n'.join(appendlines))
- self.returncode |= 4
-
- def do_EOF(self, line):
- return True
-
-
-class Config(object):
- def __init__(self, **options):
- self.pkgs_to_build = []
- self.debug_domains = []
- self.extra_assume_provided = []
- self.file = []
- self.debug = 0
- self.__dict__.update(options)
-
- def __getattr__(self, attribute):
- try:
- return super(Config, self).__getattribute__(attribute)
- except AttributeError:
- return None
-
-
-if __name__ == '__main__':
- sys.exit(main(sys.argv[1:]) or 0)
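
The Commands class above is a cmd.Cmd dispatcher whose default_cmd is
show_appends, so the removed tool supported invocations along these lines
(a sketch based only on the do_* methods defined above):

    $ bitbake-layers                 # no arguments: runs the default show_appends command
    $ bitbake-layers show_layers     # print the configured BBLAYERS value
    $ bitbake-layers show_appends    # report .bbappend state for each recipe
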
diff --git a/bitbake/bin/bitbake-runtask b/bitbake/bin/bitbake-runtask
deleted file mode 100755
index bee0f429ff..0000000000
--- a/bitbake/bin/bitbake-runtask
+++ /dev/null
@@ -1,120 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import sys
-import warnings
-sys.path.insert(0, os.path.join(os.path.dirname(os.path.dirname(sys.argv[0])), 'lib'))
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- bb.msg.note(1, bb.msg.domain.Cache, "Importing cPickle failed. Falling back to a very slow implementation.")
-
-class BBConfiguration(object):
- """
- Manages build options and configurations for one run
- """
-
- def __init__(self, debug, debug_domains):
- setattr(self, "data", {})
- setattr(self, "file", [])
- setattr(self, "cmd", None)
- setattr(self, "dump_signatures", True)
- setattr(self, "debug", debug)
- setattr(self, "debug_domains", debug_domains)
-
-_warnings_showwarning = warnings.showwarning
-def _showwarning(message, category, filename, lineno, file=None, line=None):
- """Display python warning messages using bb.msg"""
- if file is not None:
- if _warnings_showwarning is not None:
- _warnings_showwarning(message, category, filename, lineno, file, line)
- else:
- s = warnings.formatwarning(message, category, filename, lineno)
- s = s.split("\n")[0]
- bb.msg.warn(None, s)
-
-warnings.showwarning = _showwarning
-warnings.simplefilter("ignore", DeprecationWarning)
-
-import bb.event
-
-# Need to map our I/O correctly. stdout is a pipe to the server expecting
-# events. We save this and then map stdout to stderr.
-
-eventfd = os.dup(sys.stdout.fileno())
-bb.event.worker_pipe = os.fdopen(eventfd, 'w', 0)
-
-# map stdout to stderr
-os.dup2(sys.stderr.fileno(), sys.stdout.fileno())
-
-# Replace those fds with our own
-#logout = data.expand("${TMPDIR}/log/stdout.%s" % os.getpid(), self.cfgData, True)
-#mkdirhier(os.path.dirname(logout))
-#newso = open("/tmp/stdout.%s" % os.getpid(), 'w')
-#os.dup2(newso.fileno(), sys.stdout.fileno())
-#os.dup2(newso.fileno(), sys.stderr.fileno())
-
-# Don't read from the parent's stdin
-si = file("/dev/null", 'r')
-os.dup2(si.fileno(), sys.stdin.fileno())
-
-# We don't want to see signals to our parent, e.g. Ctrl+C
-os.setpgrp()
-
-# Save out the PID so that the events generated by this
-# worker can include it
-bb.event.worker_pid = os.getpid()
-bb.event.useStdout = False
-
-hashfile = sys.argv[1]
-buildfile = sys.argv[2]
-taskname = sys.argv[3]
-
-import bb.cooker
-
-p = pickle.Unpickler(file(hashfile, "rb"))
-hashdata = p.load()
-
-debug = hashdata["msg-debug"]
-debug_domains = hashdata["msg-debug-domains"]
-verbose = hashdata["verbose"]
-
-bb.utils.init_logger(bb.msg, verbose, debug, debug_domains)
-
-cooker = bb.cooker.BBCooker(BBConfiguration(debug, debug_domains), None)
-cooker.parseConfiguration()
-
-cooker.bb_cache = bb.cache.init(cooker)
-cooker.status = bb.cache.CacheData()
-
-(fn, cls) = cooker.bb_cache.virtualfn2realfn(buildfile)
-buildfile = cooker.matchFile(fn)
-fn = cooker.bb_cache.realfn2virtual(buildfile, cls)
-
-cooker.buildSetVars()
-
-# Load data into the cache for fn and parse the loaded cache data
-the_data = cooker.bb_cache.loadDataFull(fn, cooker.get_file_appends(fn), cooker.configuration.data)
-cooker.bb_cache.setData(fn, buildfile, the_data)
-cooker.bb_cache.handle_data(fn, cooker.status)
-
-#exportlist = bb.utils.preserved_envvars_export_list()
-#bb.utils.filter_environment(exportlist)
-
-if taskname.endswith("_setscene"):
- the_data.setVarFlag(taskname, "quieterrors", "1")
-
-bb.parse.siggen.set_taskdata(hashdata["hashes"], hashdata["deps"])
-
-for h in hashdata["hashes"]:
- bb.data.setVar("BBHASH_%s" % h, hashdata["hashes"][h], the_data)
-for h in hashdata["deps"]:
- bb.data.setVar("BBHASHDEPS_%s" % h, hashdata["deps"][h], the_data)
-
-ret = 0
-if sys.argv[4] != "True":
- ret = bb.build.exec_task(fn, taskname, the_data)
-sys.exit(ret)
-
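[Editorial sketch] bitbake-runtask above protects its event pipe by duplicating the inherited stdout onto a new descriptor and then pointing fd 1 at stderr, so stray prints from task code cannot corrupt the event stream sent to the server. A self-contained sketch of that descriptor dance, independent of bitbake:

    import os
    import sys

    # Keep the original stdout (the pipe to the parent) under a new fd.
    eventfd = os.dup(sys.stdout.fileno())
    events = os.fdopen(eventfd, 'w')

    # Point fd 1 at stderr: ordinary prints now land on stderr.
    os.dup2(sys.stderr.fileno(), sys.stdout.fileno())

    print("diagnostic chatter")           # ends up on stderr
    events.write("structured event\n")    # still reaches the original stdout
    events.flush()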
diff --git a/bitbake/bin/bitdoc b/bitbake/bin/bitdoc
deleted file mode 100755
index c2a7061d1b..0000000000
--- a/bitbake/bin/bitdoc
+++ /dev/null
@@ -1,532 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2005 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import optparse, os, sys
-
-# bitbake
-sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), 'lib'))
-import bb
-import bb.parse
-from string import split, join
-
-__version__ = "0.0.2"
-
-class HTMLFormatter:
- """
-    Simple class to help generate a set of HTML files. It is an
-    inferior solution compared to docbook, gtkdoc or doxygen, but it
-    should work for now.
-    We have a global introduction page (index.html), one page for the
-    list of keys (alphabetically sorted), one for the list of groups,
-    and one page for each key with links to its relations and groups.
-
- index.html
- all_keys.html
- all_groups.html
- groupNAME.html
- keyNAME.html
- """
-
- def replace(self, text, *pairs):
- """
-        Adapted from pydoc; almost identical.
- """
- while pairs:
- (a, b) = pairs[0]
- text = join(split(text, a), b)
- pairs = pairs[1:]
- return text
- def escape(self, text):
- """
-        Escape a string to conform to HTML
- """
- return self.replace(text,
- ('&', '&amp;'),
- ('<', '&lt;' ),
- ('>', '&gt;' ) )
- def createNavigator(self):
- """
-        Create the navigator
- """
- return """<table class="navigation" width="100%" summary="Navigation header" cellpadding="2" cellspacing="2">
-<tr valign="middle">
-<td><a accesskey="g" href="index.html">Home</a></td>
-<td><a accesskey="n" href="all_groups.html">Groups</a></td>
-<td><a accesskey="u" href="all_keys.html">Keys</a></td>
-</tr></table>
-"""
-
- def relatedKeys(self, item):
- """
- Create HTML to link to foreign keys
- """
-
- if len(item.related()) == 0:
- return ""
-
- txt = "<p><b>See also:</b><br>"
- txts = []
- for it in item.related():
- txts.append("""<a href="key%(it)s.html">%(it)s</a>""" % vars() )
-
- return txt + ",".join(txts)
-
- def groups(self, item):
- """
- Create HTML to link to related groups
- """
-
- if len(item.groups()) == 0:
- return ""
-
-
- txt = "<p><b>See also:</b><br>"
- txts = []
- for group in item.groups():
- txts.append( """<a href="group%s.html">%s</a> """ % (group, group) )
-
- return txt + ",".join(txts)
-
-
- def createKeySite(self, item):
- """
- Create a site for a key. It contains the header/navigator, a heading,
- the description, links to related keys and to the groups.
- """
-
- return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
-<html><head><title>Key %s</title></head>
-<link rel="stylesheet" href="style.css" type="text/css">
-<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
-%s
-<h2><span class="refentrytitle">%s</span></h2>
-
-<div class="refsynopsisdiv">
-<h2>Synopsis</h2>
-<p>
-%s
-</p>
-</div>
-
-<div class="refsynopsisdiv">
-<h2>Related Keys</h2>
-<p>
-%s
-</p>
-</div>
-
-<div class="refsynopsisdiv">
-<h2>Groups</h2>
-<p>
-%s
-</p>
-</div>
-
-
-</body>
-""" % (item.name(), self.createNavigator(), item.name(),
- self.escape(item.description()), self.relatedKeys(item), self.groups(item))
-
- def createGroupsSite(self, doc):
- """
- Create the Group Overview site
- """
-
- groups = ""
- sorted_groups = sorted(doc.groups())
- for group in sorted_groups:
- groups += """<a href="group%s.html">%s</a><br>""" % (group, group)
-
- return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
-<html><head><title>Group overview</title></head>
-<link rel="stylesheet" href="style.css" type="text/css">
-<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
-%s
-<h2>Available Groups</h2>
-%s
-</body>
-""" % (self.createNavigator(), groups)
-
- def createIndex(self):
- """
- Create the index file
- """
-
- return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
-<html><head><title>Bitbake Documentation</title></head>
-<link rel="stylesheet" href="style.css" type="text/css">
-<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
-%s
-<h2>Documentation Entrance</h2>
-<a href="all_groups.html">All available groups</a><br>
-<a href="all_keys.html">All available keys</a><br>
-</body>
-""" % self.createNavigator()
-
- def createKeysSite(self, doc):
- """
-        Create an overview of all available keys
- """
- keys = ""
- sorted_keys = sorted(doc.doc_keys())
- for key in sorted_keys:
- keys += """<a href="key%s.html">%s</a><br>""" % (key, key)
-
- return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
-<html><head><title>Key overview</title></head>
-<link rel="stylesheet" href="style.css" type="text/css">
-<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
-%s
-<h2>Available Keys</h2>
-%s
-</body>
-""" % (self.createNavigator(), keys)
-
- def createGroupSite(self, gr, items, _description = None):
- """
- Create a site for a group:
-        gr is the name of the group, items contains the keys
-        inside this group
- """
- groups = ""
- description = ""
-
- # create a section with the group descriptions
- if _description:
- description += "<h2 Description of Grozp %s</h2>" % gr
- description += _description
-
- items.sort(lambda x, y:cmp(x.name(), y.name()))
- for group in items:
- groups += """<a href="key%s.html">%s</a><br>""" % (group.name(), group.name())
-
- return """<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
-<html><head><title>Group %s</title></head>
-<link rel="stylesheet" href="style.css" type="text/css">
-<body bgcolor="white" text="black" link="#0000FF" vlink="#840084" alink="#0000FF">
-%s
-%s
-<div class="refsynopsisdiv">
-<h2>Keys in Group %s</h2>
-<pre class="synopsis">
-%s
-</pre>
-</div>
-</body>
-""" % (gr, self.createNavigator(), description, gr, groups)
-
-
-
- def createCSS(self):
- """
- Create the CSS file
- """
- return """.synopsis, .classsynopsis
-{
- background: #eeeeee;
- border: solid 1px #aaaaaa;
- padding: 0.5em;
-}
-.programlisting
-{
- background: #eeeeff;
- border: solid 1px #aaaaff;
- padding: 0.5em;
-}
-.variablelist
-{
- padding: 4px;
- margin-left: 3em;
-}
-.variablelist td:first-child
-{
- vertical-align: top;
-}
-table.navigation
-{
- background: #ffeeee;
- border: solid 1px #ffaaaa;
- margin-top: 0.5em;
- margin-bottom: 0.5em;
-}
-.navigation a
-{
- color: #770000;
-}
-.navigation a:visited
-{
- color: #550000;
-}
-.navigation .title
-{
- font-size: 200%;
-}
-div.refnamediv
-{
- margin-top: 2em;
-}
-div.gallery-float
-{
- float: left;
- padding: 10px;
-}
-div.gallery-float img
-{
- border-style: none;
-}
-div.gallery-spacer
-{
- clear: both;
-}
-a
-{
- text-decoration: none;
-}
-a:hover
-{
- text-decoration: underline;
- color: #FF0000;
-}
-"""
-
-
-
-class DocumentationItem:
- """
- A class to hold information about a configuration
- item. It contains the key name, description, a list of related names,
- and the group this item is contained in.
- """
-
- def __init__(self):
- self._groups = []
- self._related = []
- self._name = ""
- self._desc = ""
-
- def groups(self):
- return self._groups
-
- def name(self):
- return self._name
-
- def description(self):
- return self._desc
-
- def related(self):
- return self._related
-
- def setName(self, name):
- self._name = name
-
- def setDescription(self, desc):
- self._desc = desc
-
- def addGroup(self, group):
- self._groups.append(group)
-
- def addRelation(self, relation):
- self._related.append(relation)
-
- def sort(self):
- self._related.sort()
- self._groups.sort()
-
-
-class Documentation:
- """
-    Holds the documentation, with mappings from keys to items.
- """
-
- def __init__(self):
- self.__keys = {}
- self.__groups = {}
-
- def insert_doc_item(self, item):
- """
-        Insert the doc item into the internal
-        representation
- """
- item.sort()
- self.__keys[item.name()] = item
-
- for group in item.groups():
- if not group in self.__groups:
- self.__groups[group] = []
- self.__groups[group].append(item)
- self.__groups[group].sort()
-
-
- def doc_item(self, key):
- """
-        Return the DocumentationItem describing the key
- """
- try:
- return self.__keys[key]
- except KeyError:
- return None
-
- def doc_keys(self):
- """
- Return the documented KEYS (names)
- """
- return self.__keys.keys()
-
- def groups(self):
- """
- Return the names of available groups
- """
- return self.__groups.keys()
-
- def group_content(self, group_name):
- """
-        Return a list of keys/names that are in a specific
-        group, or the empty list
- """
- try:
- return self.__groups[group_name]
- except KeyError:
- return []
-
-
-def parse_cmdline(args):
- """
-    Parse the command line and return the result as a tuple
- """
-
-    usage = """%prog [options]
-
-Create a set of HTML pages (documentation) for a bitbake.conf.
-"""
-    parser = optparse.OptionParser( usage = usage, version = "Bitbake Documentation Tool Core version %s, %%prog version %s" % (bb.__version__, __version__))
-
- # Add the needed options
- parser.add_option( "-c", "--config", help = "Use the specified configuration file as source",
- action = "store", dest = "config", default = os.path.join("conf", "documentation.conf") )
-
- parser.add_option( "-o", "--output", help = "Output directory for html files",
- action = "store", dest = "output", default = "html/" )
-
- parser.add_option( "-D", "--debug", help = "Increase the debug level",
- action = "count", dest = "debug", default = 0 )
-
- parser.add_option( "-v", "--verbose", help = "output more chit-char to the terminal",
- action = "store_true", dest = "verbose", default = False )
-
-    options, args = parser.parse_args( args )
-
- if options.debug:
- bb.msg.set_debug_level(options.debug)
-
- return options.config, options.output
-
-def main():
- """
- The main Method
- """
-
-    (config_file, output_dir) = parse_cmdline( sys.argv[1:] )
-
-    # alright, let us load the file now
- try:
- documentation = bb.parse.handle( config_file, bb.data.init() )
- except IOError:
- bb.fatal( "Unable to open %s" % config_file )
- except bb.parse.ParseError:
- bb.fatal( "Unable to parse %s" % config_file )
-
- if isinstance(documentation, dict):
- documentation = documentation[""]
-
-    # Assuming the file is loaded now, we will initialize the 'tree'
- doc = Documentation()
-
- # defined states
- state_begin = 0
- state_see = 1
- state_group = 2
-
- for key in bb.data.keys(documentation):
- data = bb.data.getVarFlag(key, "doc", documentation)
- if not data:
- continue
-
- # The Documentation now starts
- doc_ins = DocumentationItem()
- doc_ins.setName(key)
-
-
- tokens = data.split(' ')
- state = state_begin
- string= ""
- for token in tokens:
- token = token.strip(',')
-
- if not state == state_see and token == "@see":
- state = state_see
- continue
- elif not state == state_group and token == "@group":
- state = state_group
- continue
-
- if state == state_begin:
- string += " %s" % token
- elif state == state_see:
- doc_ins.addRelation(token)
- elif state == state_group:
- doc_ins.addGroup(token)
-
- # set the description
- doc_ins.setDescription(string)
- doc.insert_doc_item(doc_ins)
-
- # let us create the HTML now
- bb.mkdirhier(output_dir)
- os.chdir(output_dir)
-
-    # Let us create the sites now, in the following order:
-    # start with index.html, which will point to the sites explaining
-    # all keys and groups
- html_slave = HTMLFormatter()
-
- f = file('style.css', 'w')
- print >> f, html_slave.createCSS()
-
- f = file('index.html', 'w')
- print >> f, html_slave.createIndex()
-
- f = file('all_groups.html', 'w')
- print >> f, html_slave.createGroupsSite(doc)
-
- f = file('all_keys.html', 'w')
- print >> f, html_slave.createKeysSite(doc)
-
- # now for each group create the site
- for group in doc.groups():
- f = file('group%s.html' % group, 'w')
- print >> f, html_slave.createGroupSite(group, doc.group_content(group))
-
- # now for the keys
- for key in doc.doc_keys():
- f = file('key%s.html' % doc.doc_item(key).name(), 'w')
- print >> f, html_slave.createKeySite(doc.doc_item(key))
-
-
-if __name__ == "__main__":
- main()
diff --git a/bitbake/contrib/README b/bitbake/contrib/README
deleted file mode 100644
index 25e5156619..0000000000
--- a/bitbake/contrib/README
+++ /dev/null
@@ -1 +0,0 @@
-This directory is for additional contributed files which may be useful.
diff --git a/bitbake/contrib/bbdev.sh b/bitbake/contrib/bbdev.sh
deleted file mode 100644
index 33a78531e1..0000000000
--- a/bitbake/contrib/bbdev.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-# This is a shell function to be sourced into your shell or placed in your .profile,
-# which makes setting things up for BitBake a bit easier.
-#
-# The author disclaims copyright to the contents of this file and places it in the
-# public domain.
-
-bbdev () {
- local BBDIR PKGDIR BUILDDIR
- if test x"$1" = "x--help"; then echo >&2 "syntax: bbdev [bbdir [pkgdir [builddir]]]"; return 1; fi
- if test x"$1" = x; then BBDIR=`pwd`; else BBDIR=$1; fi
- if test x"$2" = x; then PKGDIR=`pwd`; else PKGDIR=$2; fi
- if test x"$3" = x; then BUILDDIR=`pwd`; else BUILDDIR=$3; fi
-
- BBDIR=`readlink -f $BBDIR`
- PKGDIR=`readlink -f $PKGDIR`
- BUILDDIR=`readlink -f $BUILDDIR`
- if ! (test -d $BBDIR && test -d $PKGDIR && test -d $BUILDDIR); then
- echo >&2 "syntax: bbdev [bbdir [pkgdir [builddir]]]"
- return 1
- fi
-
- PATH=$BBDIR/bin:$PATH
- BBPATH=$BBDIR
- if test x"$BBDIR" != x"$PKGDIR"; then
- BBPATH=$PKGDIR:$BBPATH
- fi
- if test x"$PKGDIR" != x"$BUILDDIR"; then
- BBPATH=$BUILDDIR:$BBPATH
- fi
- export BBPATH
-}
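[Editorial sketch] For illustration, the BBPATH construction the shell function performs, written as a Python sketch (the directory arguments are placeholders):

    import os

    def make_bbpath(bbdir, pkgdir, builddir):
        # BBPATH ends up as builddir:pkgdir:bbdir, skipping adjacent
        # duplicates, mirroring the tests in bbdev above.
        bbdir, pkgdir, builddir = [os.path.realpath(os.path.expanduser(d))
                                   for d in (bbdir, pkgdir, builddir)]
        bbpath = bbdir
        if pkgdir != bbdir:
            bbpath = pkgdir + ":" + bbpath
        if builddir != pkgdir:
            bbpath = builddir + ":" + bbpath
        return bbpath

    os.environ["BBPATH"] = make_bbpath("~/bitbake", "~/openembedded", "~/build")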
diff --git a/bitbake/contrib/vim/ftdetect/bitbake.vim b/bitbake/contrib/vim/ftdetect/bitbake.vim
deleted file mode 100644
index 179e4d9888..0000000000
--- a/bitbake/contrib/vim/ftdetect/bitbake.vim
+++ /dev/null
@@ -1,24 +0,0 @@
-" Vim filetype detection file
-" Language: BitBake
-" Author: Ricardo Salveti <rsalveti@rsalveti.net>
-" Copyright: Copyright (C) 2008 Ricardo Salveti <rsalveti@rsalveti.net>
-" Licence: You may redistribute this under the same terms as Vim itself
-"
-" This sets up the syntax highlighting for BitBake files, like .bb, .bbclass and .inc
-
-if &compatible || version < 600
- finish
-endif
-
-" .bb and .bbclass
-au BufNewFile,BufRead *.b{b,bclass} set filetype=bitbake
-
-" .inc
-au BufNewFile,BufRead *.inc set filetype=bitbake
-
-" .conf
-au BufNewFile,BufRead *.conf
- \ if (match(expand("%:p:h"), "conf") > 0) |
- \ set filetype=bitbake |
- \ endif
-
diff --git a/bitbake/contrib/vim/ftplugin/bitbake.vim b/bitbake/contrib/vim/ftplugin/bitbake.vim
deleted file mode 100644
index ed69d3b1b0..0000000000
--- a/bitbake/contrib/vim/ftplugin/bitbake.vim
+++ /dev/null
@@ -1 +0,0 @@
-set sts=4 sw=4 et
diff --git a/bitbake/contrib/vim/plugin/newbb.vim b/bitbake/contrib/vim/plugin/newbb.vim
deleted file mode 100755
index afba1d9aa4..0000000000
--- a/bitbake/contrib/vim/plugin/newbb.vim
+++ /dev/null
@@ -1,85 +0,0 @@
-" Vim plugin file
-" Purpose: Create a template for new bb files
-" Author: Ricardo Salveti <rsalveti@gmail.com>
-" Copyright: Copyright (C) 2008 Ricardo Salveti <rsalveti@gmail.com>
-"
-" This file is licensed under the MIT license, see COPYING.MIT in
-" this source distribution for the terms.
-"
-" Based on the gentoo-syntax package
-"
-" Will try to use git to find the user name and email
-
-if &compatible || v:version < 600
- finish
-endif
-
-fun! <SID>GetUserName()
-    let l:user_name = system("git-config --get user.name")
-    if v:shell_error
-        return "Unknown User"
-    endif
-    return substitute(l:user_name, "\n", "", "")
-endfun
-
-fun! <SID>GetUserEmail()
-    let l:user_email = system("git-config --get user.email")
-    if v:shell_error
-        return "unknown@user.org"
-    endif
-    return substitute(l:user_email, "\n", "", "")
-endfun
-
-fun! BBHeader()
- let l:current_year = strftime("%Y")
- let l:user_name = <SID>GetUserName()
- let l:user_email = <SID>GetUserEmail()
- 0 put ='# Copyright (C) ' . l:current_year .
- \ ' ' . l:user_name . ' <' . l:user_email . '>'
- put ='# Released under the MIT license (see COPYING.MIT for the terms)'
- $
-endfun
-
-fun! NewBBTemplate()
- let l:paste = &paste
- set nopaste
-
- " Get the header
- call BBHeader()
-
- " New the bb template
- put ='DESCRIPTION = \"\"'
- put ='HOMEPAGE = \"\"'
- put ='LICENSE = \"\"'
- put ='SECTION = \"\"'
- put ='DEPENDS = \"\"'
- put ='PR = \"r0\"'
- put =''
- put ='SRC_URI = \"\"'
-
- " Go to the first place to edit
- 0
- /^DESCRIPTION =/
- exec "normal 2f\""
-
-    if l:paste == 1
- set paste
- endif
-endfun
-
-if !exists("g:bb_create_on_empty")
- let g:bb_create_on_empty = 1
-endif
-
-" disable in case of vimdiff
-if v:progname =~ "vimdiff"
- let g:bb_create_on_empty = 0
-endif
-
-augroup NewBB
- au BufNewFile *.bb
- \ if g:bb_create_on_empty |
- \ call NewBBTemplate() |
- \ endif
-augroup END
-
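[Editorial sketch] The plugin above asks git for the user's identity and writes a copyright header plus an empty recipe skeleton into new .bb buffers. A rough Python equivalent of the header generation (the fallback strings mirror the plugin's defaults):

    import subprocess
    import time

    def git_config(key, fallback):
        try:
            value = subprocess.check_output(["git", "config", "--get", key])
        except subprocess.CalledProcessError:
            return fallback
        return value.decode().strip()

    header = '# Copyright (C) %s %s <%s>\n' % (
        time.strftime("%Y"),
        git_config("user.name", "Unknown User"),
        git_config("user.email", "unknown@user.org"))
    header += '# Released under the MIT license (see COPYING.MIT for the terms)\n'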
diff --git a/bitbake/contrib/vim/syntax/bitbake.vim b/bitbake/contrib/vim/syntax/bitbake.vim
deleted file mode 100644
index a06dd9e0ac..0000000000
--- a/bitbake/contrib/vim/syntax/bitbake.vim
+++ /dev/null
@@ -1,123 +0,0 @@
-" Vim syntax file
-" Language: BitBake bb/bbclasses/inc
-" Author: Chris Larson <kergoth@handhelds.org>
-" Ricardo Salveti <rsalveti@rsalveti.net>
-" Copyright: Copyright (C) 2004 Chris Larson <kergoth@handhelds.org>
-" Copyright (C) 2008 Ricardo Salveti <rsalveti@rsalveti.net>
-"
-" This file is licensed under the MIT license, see COPYING.MIT in
-" this source distribution for the terms.
-"
-" Syntax highlighting for bb, bbclasses and inc files.
-"
-" It's an entirely new type, just has specific syntax in shell and python code
-
-if &compatible || v:version < 600
- finish
-endif
-if exists("b:current_syntax")
- finish
-endif
-
-syn include @python syntax/python.vim
-if exists("b:current_syntax")
- unlet b:current_syntax
-endif
-
-" BitBake syntax
-
-" Matching case
-syn case match
-
-" Indicates the error when nothing is matched
-syn match bbUnmatched "."
-
-" Comments
-syn cluster bbCommentGroup contains=bbTodo,@Spell
-syn keyword bbTodo COMBAK FIXME TODO XXX contained
-syn match bbComment "#.*$" contains=@bbCommentGroup
-
-" String helpers
-syn match bbQuote +['"]+ contained
-syn match bbDelimiter "[(){}=]" contained
-syn match bbArrayBrackets "[\[\]]" contained
-
-" BitBake strings
-syn match bbContinue "\\$"
-syn region bbString matchgroup=bbQuote start=+"+ skip=+\\$+ excludenl end=+"+ contained keepend contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
-syn region bbString matchgroup=bbQuote start=+'+ skip=+\\$+ excludenl end=+'+ contained keepend contains=bbTodo,bbContinue,bbVarDeref,bbVarPyValue,@Spell
-
-" Vars definition
-syn match bbExport "^export" nextgroup=bbIdentifier skipwhite
-syn keyword bbExportFlag export contained nextgroup=bbIdentifier skipwhite
-syn match bbIdentifier "[a-zA-Z0-9\-_\.\/\+]\+" display contained
-syn match bbVarDeref "${[a-zA-Z0-9\-_\.\/\+]\+}" contained
-syn match bbVarEq "\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)" contained nextgroup=bbVarValue
-syn match bbVarDef "^\(export\s*\)\?\([a-zA-Z0-9\-_\.\/\+]\+\(_[${}a-zA-Z0-9\-_\.\/\+]\+\)\?\)\s*\(:=\|+=\|=+\|\.=\|=\.\|?=\|=\)\@=" contains=bbExportFlag,bbIdentifier,bbVarDeref nextgroup=bbVarEq
-syn match bbVarValue ".*$" contained contains=bbString,bbVarDeref,bbVarPyValue
-syn region bbVarPyValue start=+${@+ skip=+\\$+ excludenl end=+}+ contained contains=@python
-
-" Vars metadata flags
-syn match bbVarFlagDef "^\([a-zA-Z0-9\-_\.]\+\)\(\[[a-zA-Z0-9\-_\.]\+\]\)\@=" contains=bbIdentifier nextgroup=bbVarFlagFlag
-syn region bbVarFlagFlag matchgroup=bbArrayBrackets start="\[" end="\]\s*\(=\)\@=" keepend excludenl contained contains=bbIdentifier nextgroup=bbVarEq
-
-" Includes and requires
-syn keyword bbInclude inherit include require contained
-syn match bbIncludeRest ".*$" contained contains=bbString,bbVarDeref
-syn match bbIncludeLine "^\(inherit\|include\|require\)\s\+" contains=bbInclude nextgroup=bbIncludeRest
-
-" Add taks and similar
-syn keyword bbStatement addtask addhandler after before EXPORT_FUNCTIONS contained
-syn match bbStatementRest ".*$" skipwhite contained contains=bbStatement
-syn match bbStatementLine "^\(addtask\|addhandler\|after\|before\|EXPORT_FUNCTIONS\)\s\+" contains=bbStatement nextgroup=bbStatementRest
-
-" OE Important Functions
-syn keyword bbOEFunctions do_fetch do_unpack do_patch do_configure do_compile do_stage do_install do_package contained
-
-" Generic Functions
-syn match bbFunction "\h[0-9A-Za-z_-]*" display contained contains=bbOEFunctions
-
-" BitBake shell metadata
-syn include @shell syntax/sh.vim
-if exists("b:current_syntax")
- unlet b:current_syntax
-endif
-syn keyword bbShFakeRootFlag fakeroot contained
-syn match bbShFuncDef "^\(fakeroot\s*\)\?\([0-9A-Za-z_-]\+\)\(python\)\@<!\(\s*()\s*\)\({\)\@=" contains=bbShFakeRootFlag,bbFunction,bbDelimiter nextgroup=bbShFuncRegion skipwhite
-syn region bbShFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@shell
-
-" BitBake python metadata
-syn keyword bbPyFlag python contained
-syn match bbPyFuncDef "^\(python\s\+\)\([0-9A-Za-z_-]\+\)\?\(\s*()\s*\)\({\)\@=" contains=bbPyFlag,bbFunction,bbDelimiter nextgroup=bbPyFuncRegion skipwhite
-syn region bbPyFuncRegion matchgroup=bbDelimiter start="{\s*$" end="^}\s*$" keepend contained contains=@python
-
-" BitBake 'def'd python functions
-syn keyword bbPyDef def contained
-syn region bbPyDefRegion start='^\(def\s\+\)\([0-9A-Za-z_-]\+\)\(\s*(.*)\s*\):\s*$' end='^\(\s\|$\)\@!' contains=@python
-
-" Highlighting Definitions
-hi def link bbUnmatched Error
-hi def link bbInclude Include
-hi def link bbTodo Todo
-hi def link bbComment Comment
-hi def link bbQuote String
-hi def link bbString String
-hi def link bbDelimiter Keyword
-hi def link bbArrayBrackets Statement
-hi def link bbContinue Special
-hi def link bbExport Type
-hi def link bbExportFlag Type
-hi def link bbIdentifier Identifier
-hi def link bbVarDeref PreProc
-hi def link bbVarDef Identifier
-hi def link bbVarValue String
-hi def link bbShFakeRootFlag Type
-hi def link bbFunction Function
-hi def link bbPyFlag Type
-hi def link bbPyDef Statement
-hi def link bbStatement Statement
-hi def link bbStatementRest Identifier
-hi def link bbOEFunctions Special
-hi def link bbVarPyValue PreProc
-
-let b:current_syntax = "bb"
diff --git a/bitbake/doc/COPYING.GPL b/bitbake/doc/COPYING.GPL
deleted file mode 100644
index d511905c16..0000000000
--- a/bitbake/doc/COPYING.GPL
+++ /dev/null
@@ -1,339 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 2, June 1991
-
- Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The licenses for most software are designed to take away your
-freedom to share and change it. By contrast, the GNU General Public
-License is intended to guarantee your freedom to share and change free
-software--to make sure the software is free for all its users. This
-General Public License applies to most of the Free Software
-Foundation's software and to any other program whose authors commit to
-using it. (Some other Free Software Foundation software is covered by
-the GNU Lesser General Public License instead.) You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-this service if you wish), that you receive source code or can get it
-if you want it, that you can change the software or use pieces of it
-in new free programs; and that you know you can do these things.
-
- To protect your rights, we need to make restrictions that forbid
-anyone to deny you these rights or to ask you to surrender the rights.
-These restrictions translate to certain responsibilities for you if you
-distribute copies of the software, or if you modify it.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must give the recipients all the rights that
-you have. You must make sure that they, too, receive or can get the
-source code. And you must show them these terms so they know their
-rights.
-
- We protect your rights with two steps: (1) copyright the software, and
-(2) offer you this license which gives you legal permission to copy,
-distribute and/or modify the software.
-
- Also, for each author's protection and ours, we want to make certain
-that everyone understands that there is no warranty for this free
-software. If the software is modified by someone else and passed on, we
-want its recipients to know that what they have is not the original, so
-that any problems introduced by others will not reflect on the original
-authors' reputations.
-
- Finally, any free program is threatened constantly by software
-patents. We wish to avoid the danger that redistributors of a free
-program will individually obtain patent licenses, in effect making the
-program proprietary. To prevent this, we have made it clear that any
-patent must be licensed for everyone's free use or not licensed at all.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- GNU GENERAL PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. This License applies to any program or other work which contains
-a notice placed by the copyright holder saying it may be distributed
-under the terms of this General Public License. The "Program", below,
-refers to any such program or work, and a "work based on the Program"
-means either the Program or any derivative work under copyright law:
-that is to say, a work containing the Program or a portion of it,
-either verbatim or with modifications and/or translated into another
-language. (Hereinafter, translation is included without limitation in
-the term "modification".) Each licensee is addressed as "you".
-
-Activities other than copying, distribution and modification are not
-covered by this License; they are outside its scope. The act of
-running the Program is not restricted, and the output from the Program
-is covered only if its contents constitute a work based on the
-Program (independent of having been made by running the Program).
-Whether that is true depends on what the Program does.
-
- 1. You may copy and distribute verbatim copies of the Program's
-source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate
-copyright notice and disclaimer of warranty; keep intact all the
-notices that refer to this License and to the absence of any warranty;
-and give any other recipients of the Program a copy of this License
-along with the Program.
-
-You may charge a fee for the physical act of transferring a copy, and
-you may at your option offer warranty protection in exchange for a fee.
-
- 2. You may modify your copy or copies of the Program or any portion
-of it, thus forming a work based on the Program, and copy and
-distribute such modifications or work under the terms of Section 1
-above, provided that you also meet all of these conditions:
-
- a) You must cause the modified files to carry prominent notices
- stating that you changed the files and the date of any change.
-
- b) You must cause any work that you distribute or publish, that in
- whole or in part contains or is derived from the Program or any
- part thereof, to be licensed as a whole at no charge to all third
- parties under the terms of this License.
-
- c) If the modified program normally reads commands interactively
- when run, you must cause it, when started running for such
- interactive use in the most ordinary way, to print or display an
- announcement including an appropriate copyright notice and a
- notice that there is no warranty (or else, saying that you provide
- a warranty) and that users may redistribute the program under
- these conditions, and telling the user how to view a copy of this
- License. (Exception: if the Program itself is interactive but
- does not normally print such an announcement, your work based on
- the Program is not required to print an announcement.)
-
-These requirements apply to the modified work as a whole. If
-identifiable sections of that work are not derived from the Program,
-and can be reasonably considered independent and separate works in
-themselves, then this License, and its terms, do not apply to those
-sections when you distribute them as separate works. But when you
-distribute the same sections as part of a whole which is a work based
-on the Program, the distribution of the whole must be on the terms of
-this License, whose permissions for other licensees extend to the
-entire whole, and thus to each and every part regardless of who wrote it.
-
-Thus, it is not the intent of this section to claim rights or contest
-your rights to work written entirely by you; rather, the intent is to
-exercise the right to control the distribution of derivative or
-collective works based on the Program.
-
-In addition, mere aggregation of another work not based on the Program
-with the Program (or with a work based on the Program) on a volume of
-a storage or distribution medium does not bring the other work under
-the scope of this License.
-
- 3. You may copy and distribute the Program (or a work based on it,
-under Section 2) in object code or executable form under the terms of
-Sections 1 and 2 above provided that you also do one of the following:
-
- a) Accompany it with the complete corresponding machine-readable
- source code, which must be distributed under the terms of Sections
- 1 and 2 above on a medium customarily used for software interchange; or,
-
- b) Accompany it with a written offer, valid for at least three
- years, to give any third party, for a charge no more than your
- cost of physically performing source distribution, a complete
- machine-readable copy of the corresponding source code, to be
- distributed under the terms of Sections 1 and 2 above on a medium
- customarily used for software interchange; or,
-
- c) Accompany it with the information you received as to the offer
- to distribute corresponding source code. (This alternative is
- allowed only for noncommercial distribution and only if you
- received the program in object code or executable form with such
- an offer, in accord with Subsection b above.)
-
-The source code for a work means the preferred form of the work for
-making modifications to it. For an executable work, complete source
-code means all the source code for all modules it contains, plus any
-associated interface definition files, plus the scripts used to
-control compilation and installation of the executable. However, as a
-special exception, the source code distributed need not include
-anything that is normally distributed (in either source or binary
-form) with the major components (compiler, kernel, and so on) of the
-operating system on which the executable runs, unless that component
-itself accompanies the executable.
-
-If distribution of executable or object code is made by offering
-access to copy from a designated place, then offering equivalent
-access to copy the source code from the same place counts as
-distribution of the source code, even though third parties are not
-compelled to copy the source along with the object code.
-
- 4. You may not copy, modify, sublicense, or distribute the Program
-except as expressly provided under this License. Any attempt
-otherwise to copy, modify, sublicense or distribute the Program is
-void, and will automatically terminate your rights under this License.
-However, parties who have received copies, or rights, from you under
-this License will not have their licenses terminated so long as such
-parties remain in full compliance.
-
- 5. You are not required to accept this License, since you have not
-signed it. However, nothing else grants you permission to modify or
-distribute the Program or its derivative works. These actions are
-prohibited by law if you do not accept this License. Therefore, by
-modifying or distributing the Program (or any work based on the
-Program), you indicate your acceptance of this License to do so, and
-all its terms and conditions for copying, distributing or modifying
-the Program or works based on it.
-
- 6. Each time you redistribute the Program (or any work based on the
-Program), the recipient automatically receives a license from the
-original licensor to copy, distribute or modify the Program subject to
-these terms and conditions. You may not impose any further
-restrictions on the recipients' exercise of the rights granted herein.
-You are not responsible for enforcing compliance by third parties to
-this License.
-
- 7. If, as a consequence of a court judgment or allegation of patent
-infringement or for any other reason (not limited to patent issues),
-conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot
-distribute so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you
-may not distribute the Program at all. For example, if a patent
-license would not permit royalty-free redistribution of the Program by
-all those who receive copies directly or indirectly through you, then
-the only way you could satisfy both it and this License would be to
-refrain entirely from distribution of the Program.
-
-If any portion of this section is held invalid or unenforceable under
-any particular circumstance, the balance of the section is intended to
-apply and the section as a whole is intended to apply in other
-circumstances.
-
-It is not the purpose of this section to induce you to infringe any
-patents or other property right claims or to contest validity of any
-such claims; this section has the sole purpose of protecting the
-integrity of the free software distribution system, which is
-implemented by public license practices. Many people have made
-generous contributions to the wide range of software distributed
-through that system in reliance on consistent application of that
-system; it is up to the author/donor to decide if he or she is willing
-to distribute software through any other system and a licensee cannot
-impose that choice.
-
-This section is intended to make thoroughly clear what is believed to
-be a consequence of the rest of this License.
-
- 8. If the distribution and/or use of the Program is restricted in
-certain countries either by patents or by copyrighted interfaces, the
-original copyright holder who places the Program under this License
-may add an explicit geographical distribution limitation excluding
-those countries, so that distribution is permitted only in or among
-countries not thus excluded. In such case, this License incorporates
-the limitation as if written in the body of this License.
-
- 9. The Free Software Foundation may publish revised and/or new versions
-of the General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
-Each version is given a distinguishing version number. If the Program
-specifies a version number of this License which applies to it and "any
-later version", you have the option of following the terms and conditions
-either of that version or of any later version published by the Free
-Software Foundation. If the Program does not specify a version number of
-this License, you may choose any version ever published by the Free Software
-Foundation.
-
- 10. If you wish to incorporate parts of the Program into other free
-programs whose distribution conditions are different, write to the author
-to ask for permission. For software which is copyrighted by the Free
-Software Foundation, write to the Free Software Foundation; we sometimes
-make exceptions for this. Our decision will be guided by the two goals
-of preserving the free status of all derivatives of our free software and
-of promoting the sharing and reuse of software generally.
-
- NO WARRANTY
-
- 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
-FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
-OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
-PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
-TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
-PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
-REPAIR OR CORRECTION.
-
- 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
-REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
-INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
-OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
-TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
-YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
-PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGES.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-convey the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License along
- with this program; if not, write to the Free Software Foundation, Inc.,
- 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program is interactive, make it output a short notice like this
-when it starts in an interactive mode:
-
- Gnomovision version 69, Copyright (C) year name of author
- Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, the commands you use may
-be called something other than `show w' and `show c'; they could even be
-mouse-clicks or menu items--whatever suits your program.
-
-You should also get your employer (if you work as a programmer) or your
-school, if any, to sign a "copyright disclaimer" for the program, if
-necessary. Here is a sample; alter the names:
-
- Yoyodyne, Inc., hereby disclaims all copyright interest in the program
- `Gnomovision' (which makes passes at compilers) written by James Hacker.
-
- <signature of Ty Coon>, 1 April 1989
- Ty Coon, President of Vice
-
-This General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may
-consider it more useful to permit linking proprietary applications with the
-library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License.
diff --git a/bitbake/doc/COPYING.MIT b/bitbake/doc/COPYING.MIT
deleted file mode 100644
index 7e7d57413d..0000000000
--- a/bitbake/doc/COPYING.MIT
+++ /dev/null
@@ -1,17 +0,0 @@
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
-SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
-DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
-OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
-THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/bitbake/doc/bitbake.1 b/bitbake/doc/bitbake.1
deleted file mode 100644
index 036402e8ac..0000000000
--- a/bitbake/doc/bitbake.1
+++ /dev/null
@@ -1,121 +0,0 @@
-.\" Hey, EMACS: -*- nroff -*-
-.\" First parameter, NAME, should be all caps
-.\" Second parameter, SECTION, should be 1-8, maybe w/ subsection
-.\" other parameters are allowed: see man(7), man(1)
-.TH BITBAKE 1 "November 19, 2006"
-.\" Please adjust this date whenever revising the manpage.
-.\"
-.\" Some roff macros, for reference:
-.\" .nh disable hyphenation
-.\" .hy enable hyphenation
-.\" .ad l left justify
-.\" .ad b justify to both left and right margins
-.\" .nf disable filling
-.\" .fi enable filling
-.\" .br insert line break
-.\" .sp <n> insert n+1 empty lines
-.\" for manpage-specific macros, see man(7)
-.SH NAME
-BitBake \- simple tool for the execution of tasks
-.SH SYNOPSIS
-.B bitbake
-.RI [ options ] " packagenames"
-.br
-.SH DESCRIPTION
-This manual page documents briefly the
-.B bitbake
-command.
-.PP
-.\" TeX users may be more comfortable with the \fB<whatever>\fP and
-.\" \fI<whatever>\fP escape sequences to invode bold face and italics,
-.\" respectively.
-\fBbitbake\fP is a program that executes the specified task (default is 'build')
-for a given set of BitBake files.
-.br
-It expects that BBFILES is defined, which is a space-separated list of files to
-be executed. BBFILES does support wildcards.
-.br
-Default BBFILES are the .bb files in the current directory.
-.SH OPTIONS
-This program follows the usual GNU command line syntax, with long
-options starting with two dashes (`--').
-.TP
-.B \-h, \-\-help
-Show summary of options.
-.TP
-.B \-\-version
-Show version of program.
-.TP
-.B \-bBUILDFILE, \-\-buildfile=BUILDFILE
-execute the task against this .bb file, rather than a package from BBFILES.
-.TP
-.B \-k, \-\-continue
-continue as much as possible after an error. While the target that failed, and
-those that depend on it, cannot be remade, the other dependencies of these
-targets can be processed all the same.
-.TP
-.B \-a, \-\-tryaltconfigs
-continue with builds by trying to use alternative providers where possible.
-.TP
-.B \-f, \-\-force
-force run of specified cmd, regardless of stamp status
-.TP
-.B \-i, \-\-interactive
-drop into the interactive mode also called the BitBake shell.
-.TP
-.B \-cCMD, \-\-cmd=CMD
-Specify task to execute. Note that this only executes the specified task for
-the providee and the packages it depends on, i.e. 'compile' does not implicitly
-call stage for the dependencies (IOW: use only if you know what you are doing).
-Depending on the base.bbclass, a listtasks task is defined which will show the
-available tasks.
-.TP
-.B \-rFILE, \-\-read=FILE
-read the specified file before bitbake.conf
-.TP
-.B \-v, \-\-verbose
-output more chit-chat to the terminal
-.TP
-.B \-D, \-\-debug
-Increase the debug level. You can specify this more than once.
-.TP
-.B \-n, \-\-dry-run
-don't execute, just go through the motions
-.TP
-.B \-p, \-\-parse-only
-quit after parsing the BB files (developers only)
-.TP
-.B \-d, \-\-disable-psyco
-disable using the psyco just-in-time compiler (not recommended)
-.TP
-.B \-s, \-\-show-versions
-show current and preferred versions of all packages
-.TP
-.B \-e, \-\-environment
-show the global or per-package environment (this is what used to be bbread)
-.TP
-.B \-g, \-\-graphviz
-emit the dependency trees of the specified packages in the dot syntax
-.TP
-.B \-IIGNORED\_DOT\_DEPS, \-\-ignore-deps=IGNORED_DOT_DEPS
-Stop processing at the given list of dependencies when generating dependency
-graphs. This can help to make the graph more appealing.
-.TP
-.B \-lDEBUG_DOMAINS, \-\-log-domains=DEBUG_DOMAINS
-Show debug logging for the specified logging domains
-.TP
-.B \-P, \-\-profile
-profile the command and print a report
-
-
-.SH AUTHORS
-BitBake was written by
-Phil Blundell,
-Holger Freyther,
-Chris Larson,
-Mickey Lauer,
-Richard Purdie,
-Holger Schurig
-.PP
-This manual page was written by Marcin Juszkiewicz <marcin@hrw.one.pl>
-for the Debian project (but may be used by others).
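[Editorial sketch] As a usage illustration of the options documented above, a short Python sketch driving bitbake non-interactively (the recipe file name is hypothetical):

    import subprocess

    # Force-run the compile task for one specific .bb file, regardless of
    # stamp status, per the -b/-c/-f options described in this manpage.
    subprocess.call(["bitbake", "-b", "example_1.0.bb", "-c", "compile", "-f"])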
diff --git a/bitbake/doc/manual/Makefile b/bitbake/doc/manual/Makefile
deleted file mode 100644
index a43c025455..0000000000
--- a/bitbake/doc/manual/Makefile
+++ /dev/null
@@ -1,56 +0,0 @@
-topdir = .
-manual = $(topdir)/usermanual.xml
-# types = pdf txt rtf ps xhtml html man tex texi dvi
-# types = pdf txt
-types = $(xmltotypes) $(htmltypes)
-xmltotypes = pdf txt
-htmltypes = html xhtml
-htmlxsl = $(if $(filter $@,$(foreach type,$(htmltypes),$(type)-nochunks)),http://docbook.sourceforge.net/release/xsl/current/xhtml/docbook.xsl,http://docbook.sourceforge.net/release/xsl/current/$@/chunk.xsl)
-htmlcssfile = docbook.css
-htmlcss = $(topdir)/html.css
-# htmlcssfile =
-# htmlcss =
-cleanfiles = $(foreach i,$(types),$(topdir)/$(i))
-
-ifdef DEBUG
-define command
- $(1)
-endef
-else
-define command
- @echo $(2) $(3) $(4)
- @$(1) >/dev/null
-endef
-endif
-
-all: $(types)
-
-lint: $(manual) FORCE
- $(call command,xmllint --xinclude --postvalid --noout $(manual),XMLLINT $(manual))
-
-$(types) $(foreach type,$(htmltypes),$(type)-nochunks): lint FORCE
-
-$(foreach type,$(htmltypes),$(type)-nochunks): $(if $(htmlcss),$(htmlcss)) $(manual)
- @mkdir -p $@
-ifdef htmlcss
- $(call command,install -m 0644 $(htmlcss) $@/$(htmlcssfile),CP $(htmlcss) $@/$(htmlcssfile))
-endif
- $(call command,xsltproc --stringparam base.dir $@/ $(if $(htmlcssfile),--stringparam html.stylesheet $(htmlcssfile)) $(htmlxsl) $(manual) > $@/index.$(patsubst %-nochunks,%,$@),XSLTPROC $@ $(manual))
-
-$(htmltypes): $(if $(htmlcss),$(htmlcss)) $(manual)
- @mkdir -p $@
-ifdef htmlcss
- $(call command,install -m 0644 $(htmlcss) $@/$(htmlcssfile),CP $(htmlcss) $@/$(htmlcssfile))
-endif
- $(call command,xsltproc --stringparam base.dir $@/ $(if $(htmlcssfile),--stringparam html.stylesheet $(htmlcssfile)) $(htmlxsl) $(manual),XSLTPROC $@ $(manual))
-
-$(xmltotypes): $(manual)
- $(call command,xmlto --extensions -o $(topdir)/$@ $@ $(manual),XMLTO $@ $(manual))
-
-clean:
- rm -rf $(cleanfiles)
-
-$(foreach i,$(types) $(foreach type,$(htmltypes),$(type)-nochunks),clean-$(i)):
- rm -rf $(patsubst clean-%,%,$@)
-
-FORCE:
diff --git a/bitbake/doc/manual/html.css b/bitbake/doc/manual/html.css
deleted file mode 100644
index 6eedfd3189..0000000000
--- a/bitbake/doc/manual/html.css
+++ /dev/null
@@ -1,281 +0,0 @@
-/* Feuille de style DocBook du projet Traduc.org */
-/* DocBook CSS stylesheet of the Traduc.org project */
-
-/* (c) Jean-Philippe Guérard - 14 août 2004 */
-/* (c) Jean-Philippe Guérard - 14 August 2004 */
-
-/* Cette feuille de style est libre, vous pouvez la */
-/* redistribuer et la modifier selon les termes de la Licence */
-/* Art Libre. Vous trouverez un exemplaire de cette Licence sur */
-/* http://tigreraye.org/Petit-guide-du-traducteur.html#licence-art-libre */
-
-/* This work of art is free, you can redistribute it and/or */
-/* modify it according to terms of the Free Art license. You */
-/* will find a specimen of this license on the Copyleft */
-/* Attitude web site: http://artlibre.org as well as on other */
-/* sites. */
-/* Please note that the French version of this licence as shown */
-/* on http://tigreraye.org/Petit-guide-du-traducteur.html#licence-art-libre */
-/* is the only official licence of this document. The English */
-/* version is only provided to help you understand this licence. */
-
-/* La dernière version de cette feuille de style est toujours */
-/* disponible sur : http://tigreraye.org/style.css */
-/* Elle est également disponible sur : */
-/* http://www.traduc.org/docs/HOWTO/lecture/style.css */
-
-/* The latest version of this stylesheet is available from: */
-/* http://tigreraye.org/style.css */
-/* It is also available on: */
-/* http://www.traduc.org/docs/HOWTO/lecture/style.css */
-
-/* N'hésitez pas à envoyer vos commentaires et corrections à */
-/* Jean-Philippe Guérard <jean-philippe.guerard@tigreraye.org> */
-
-/* Please send feedback and bug reports to */
-/* Jean-Philippe Guérard <jean-philippe.guerard@tigreraye.org> */
-
-/* $Id: style.css,v 1.14 2004/09/10 20:12:09 fevrier Exp fevrier $ */
-
-/* Présentation générale du document */
-/* Overall document presentation */
-
-body {
- /*
- font-family: Apolline, "URW Palladio L", Garamond, jGaramond,
- "Bitstream Cyberbit", "Palatino Linotype", serif;
- */
- margin: 7%;
- background-color: white;
-}
-
-/* Taille du texte */
-/* Text size */
-
-* { font-size: 100%; }
-
-/* Gestion des textes mis en relief imbriqués */
-/* Embedded emphasis */
-
-em { font-style: italic; }
-em em { font-style: normal; }
-em em em { font-style: italic; }
-
-/* Titres */
-/* Titles */
-
-h1 { font-size: 200%; font-weight: 900; }
-h2 { font-size: 160%; font-weight: 900; }
-h3 { font-size: 130%; font-weight: bold; }
-h4 { font-size: 115%; font-weight: bold; }
-h5 { font-size: 108%; font-weight: bold; }
-h6 { font-weight: bold; }
-
-/* Nom de famille en petites majuscules (uniquement en français) */
-/* Last names in small caps (for French only) */
-
-*[class~="surname"]:lang(fr) { font-variant: small-caps; }
-
-/* Blocs de citation */
-/* Quotation blocks */
-
-div[class~="blockquote"] {
- border: solid 2px #AAA;
- padding: 5px;
- margin: 5px;
-}
-
-div[class~="blockquote"] > table {
- border: none;
-}
-
-/* Blocs litéraux : fond gris clair */
-/* Literal blocks: light gray background */
-
-*[class~="literallayout"] {
- background: #f0f0f0;
- padding: 5px;
- margin: 5px;
-}
-
-/* Programmes et captures texte : fond bleu clair */
-/* Listing and text screen snapshots: light blue background */
-
-*[class~="programlisting"], *[class~="screen"] {
- background: #f0f0ff;
- padding: 5px;
- margin: 5px;
-}
-
-/* Les textes à remplacer sont surlignés en vert pâle */
-/* Replaceable text is highlighted in pale green */
-
-*[class~="replaceable"] {
- background-color: #98fb98;
- font-style: normal; }
-
-/* Tables : fonds gris clair & bords simples */
-/* Tables: light gray background and solid borders */
-
-*[class~="table"] *[class~="title"] { width:100%; border: 0px; }
-
-table {
- border: 1px solid #aaa;
- border-collapse: collapse;
- padding: 2px;
- margin: 5px;
-}
-
-/* Listes simples en style table */
-/* Simple lists in table presentation */
-
-table[class~="simplelist"] {
- background-color: #F0F0F0;
- margin: 5px;
- border: solid 1px #AAA;
-}
-
-table[class~="simplelist"] td {
- border: solid 1px #AAA;
-}
-
-/* Les tables */
-/* Tables */
-
-*[class~="table"] table {
- background-color: #F0F0F0;
- border: solid 1px #AAA;
-}
-*[class~="informaltable"] table { background-color: #F0F0F0; }
-
-th,td {
- vertical-align: baseline;
- text-align: left;
- padding: 0.1em 0.3em;
- empty-cells: show;
-}
-
-/* Alignement des colonnes */
-/* Column alignment */
-
-td[align=center] , th[align=center] { text-align: center; }
-td[align=right] , th[align=right] { text-align: right; }
-td[align=left] , th[align=left] { text-align: left; }
-td[align=justify] , th[align=justify] { text-align: justify; }
-
-/* Pas de marge autour des images */
-/* No inside margins for images */
-
-img { border: 0; }
-
-/* Les liens ne sont pas soulignés */
-/* No underlines for links */
-
-:link , :visited , :active { text-decoration: none; }
-
-/* Prudence : cadre jaune et fond jaune clair */
-/* Caution: yellow border and light yellow background */
-
-*[class~="caution"] {
- border: solid 2px yellow;
- background-color: #ffffe0;
- padding: 1em 6px 1em ;
- margin: 5px;
-}
-
-*[class~="caution"] th {
- vertical-align: middle
-}
-
-*[class~="caution"] table {
- background-color: #ffffe0;
- border: none;
-}
-
-/* Note importante : cadre jaune et fond jaune clair */
-/* Important: yellow border and light yellow background */
-
-*[class~="important"] {
- border: solid 2px yellow;
- background-color: #ffffe0;
- padding: 1em 6px 1em;
- margin: 5px;
-}
-
-*[class~="important"] th {
- vertical-align: middle
-}
-
-*[class~="important"] table {
- background-color: #ffffe0;
- border: none;
-}
-
-/* Mise en évidence : texte légèrement plus grand */
-/* Highlights: slightly larger texts */
-
-*[class~="highlights"] {
- font-size: 110%;
-}
-
-/* Notes: blue border and light blue background */
-
-*[class~="note"] {
- border: solid 2px #7099C5;
- background-color: #f0f0ff;
- padding: 1em 6px 1em ;
- margin: 5px;
-}
-
-*[class~="note"] th {
- vertical-align: middle
-}
-
-*[class~="note"] table {
- background-color: #f0f0ff;
- border: none;
-}
-
-/* Tip: green border and light green background */
-
-*[class~="tip"] {
- border: solid 2px #00ff00;
- background-color: #f0ffff;
- padding: 1em 6px 1em ;
- margin: 5px;
-}
-
-*[class~="tip"] th {
- vertical-align: middle;
-}
-
-*[class~="tip"] table {
- background-color: #f0ffff;
- border: none;
-}
-
-/* Warning: red border and light red background */
-
-*[class~="warning"] {
- border: solid 2px #ff0000;
- background-color: #fff0f0;
- padding: 1em 6px 1em ;
- margin: 5px;
-}
-
-*[class~="warning"] th {
- vertical-align: middle;
-}
-
-
-*[class~="warning"] table {
- background-color: #fff0f0;
- border: none;
-}
-
-/* The End */
-
diff --git a/bitbake/doc/manual/usermanual.xml b/bitbake/doc/manual/usermanual.xml
deleted file mode 100644
index 32b40eee54..0000000000
--- a/bitbake/doc/manual/usermanual.xml
+++ /dev/null
@@ -1,534 +0,0 @@
-<?xml version="1.0"?>
-<!--
- ex:ts=4:sw=4:sts=4:et
- -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
--->
-<!DOCTYPE book PUBLIC "-//OASIS//DTD DocBook XML V4.2//EN"
- "http://www.oasis-open.org/docbook/xml/4.2/docbookx.dtd">
-<book>
- <bookinfo>
- <title>BitBake User Manual</title>
- <authorgroup>
- <corpauthor>BitBake Team</corpauthor>
- </authorgroup>
- <copyright>
- <year>2004, 2005, 2006</year>
- <holder>Chris Larson</holder>
- <holder>Phil Blundell</holder>
- </copyright>
- <legalnotice>
- <para>This work is licensed under the Creative Commons Attribution License. To view a copy of this license, visit <ulink url="http://creativecommons.org/licenses/by/2.5/">http://creativecommons.org/licenses/by/2.5/</ulink> or send a letter to Creative Commons, 559 Nathan Abbott Way, Stanford, California 94305, USA.</para>
- </legalnotice>
- </bookinfo>
- <chapter>
- <title>Introduction</title>
- <section>
- <title>Overview</title>
- <para>BitBake is, at its simplest, a tool for executing
-tasks and managing metadata. As such, its similarities to GNU make and other
-build tools are readily apparent. It was inspired by Portage, the package management system used by the Gentoo Linux distribution. BitBake is the basis of the <ulink url="http://www.openembedded.org/">OpenEmbedded</ulink> project, which is being used to build and maintain a number of embedded Linux distributions, including OpenZaurus and Familiar.</para>
- </section>
- <section>
- <title>Background and Goals</title>
- <para>Prior to BitBake, no existing build tool adequately met
-the needs of an aspiring embedded Linux distribution. All of the
-build systems used by traditional desktop Linux distributions lacked
-important functionality, and none of the ad-hoc
-<emphasis>buildroot</emphasis> systems, prevalent in the
-embedded space, were scalable or maintainable.</para>
-
- <para>Some important goals for BitBake were:
- <itemizedlist>
- <listitem><para>Handle cross-compilation.</para></listitem>
- <listitem><para>Handle interpackage dependencies (build time on target architecture, build time on native architecture, and runtime).</para></listitem>
- <listitem><para>Support running any number of tasks within a given package, including, but not limited to, fetching upstream sources, unpacking them, patching them, configuring them, et cetera.</para></listitem>
- <listitem><para>Must be Linux distribution agnostic (both build and target).</para></listitem>
- <listitem><para>Must be architecture agnostic.</para></listitem>
- <listitem><para>Must support multiple build and target operating systems (including Cygwin, the BSDs, etc.).</para></listitem>
- <listitem><para>Must be able to be self contained, rather than tightly integrated into the build machine's root filesystem.</para></listitem>
- <listitem><para>There must be a way to handle conditional metadata (on target architecture, operating system, distribution, machine).</para></listitem>
- <listitem><para>It must be easy for the person using the tools to supply their own local metadata and packages to operate against.</para></listitem>
- <listitem><para>Must make it easy to collaborate
-between multiple projects using BitBake for their
-builds.</para></listitem>
- <listitem><para>Should provide an inheritance mechanism to
-share common metadata between many packages.</para></listitem>
- <listitem><para>Et cetera...</para></listitem>
- </itemizedlist>
- </para>
- <para>BitBake satisfies all these and many more. Flexibility and power have always been the priorities. It is highly extensible, supporting embedded Python code and execution of any arbitrary tasks.</para>
- </section>
- </chapter>
- <chapter>
- <title>Metadata</title>
- <section>
- <title>Description</title>
- <para>BitBake metadata can be classified into 3 major areas:</para>
- <itemizedlist>
- <listitem>
- <para>Configuration Files</para>
- </listitem>
- <listitem>
- <para>.bb Files</para>
- </listitem>
- <listitem>
- <para>Classes</para>
- </listitem>
- </itemizedlist>
- <para>What follows is a large number of examples of BitBake metadata. Any syntax that is not supported in all of the aforementioned areas will be documented as such.</para>
- <section>
- <title>Basic variable setting</title>
- <para><screen><varname>VARIABLE</varname> = "value"</screen></para>
- <para>In this example, <varname>VARIABLE</varname> is <literal>value</literal>.</para>
- </section>
- <section>
- <title>Variable expansion</title>
- <para>BitBake supports variables referencing one another's contents using a syntax similar to shell scripting.</para>
- <para><screen><varname>A</varname> = "aval"
-<varname>B</varname> = "pre${A}post"</screen></para>
- <para>This results in <varname>A</varname> containing <literal>aval</literal> and <varname>B</varname> containing <literal>preavalpost</literal>.</para>
- </section>
- <section>
- <title>Setting a default value (?=)</title>
- <para><screen><varname>A</varname> ?= "aval"</screen></para>
- <para>If <varname>A</varname> is set before the above is called, it will retain its previous value. If <varname>A</varname> is unset prior to the above call, <varname>A</varname> will be set to <literal>aval</literal>. Note that this assignment is immediate, so if there are multiple ?= assignments to a single variable, the first of those will be used.</para>
- </section>
- <section>
- <title>Setting a default value (??=)</title>
- <para><screen><varname>A</varname> ??= "somevalue"</screen></para>
- <para><screen><varname>A</varname> ??= "someothervalue"</screen></para>
- <para>If <varname>A</varname> is set before the above, it will retain that value. If <varname>A</varname> is unset prior to the above, <varname>A</varname> will be set to <literal>someothervalue</literal>. This is a lazy version of ?=, in that the assignment does not occur until the end of the parsing process, so that the last, rather than the first, ??= assignment to a given variable will be used.</para>
- </section>
- <section>
- <title>Immediate variable expansion (:=)</title>
- <para>:= results in a variable's contents being expanded immediately, rather than when the variable is actually used.</para>
- <para><screen><varname>T</varname> = "123"
-<varname>A</varname> := "${B} ${A} test ${T}"
-<varname>T</varname> = "456"
-<varname>B</varname> = "${T} bval"
-
-<varname>C</varname> = "cval"
-<varname>C</varname> := "${C}append"</screen></para>
- <para>In that example, <varname>A</varname> would contain <literal> test 123</literal>, <varname>B</varname> would contain <literal>456 bval</literal>, and <varname>C</varname> would be <literal>cvalappend</literal>.</para>
- </section>
- <section>
- <title>Appending (+=) and prepending (=+)</title>
- <para><screen><varname>B</varname> = "bval"
-<varname>B</varname> += "additionaldata"
-<varname>C</varname> = "cval"
-<varname>C</varname> =+ "test"</screen></para>
- <para>In this example, <varname>B</varname> is now <literal>bval additionaldata</literal> and <varname>C</varname> is <literal>test cval</literal>.</para>
- </section>
- <section>
- <title>Appending (.=) and prepending (=.) without spaces</title>
- <para><screen><varname>B</varname> = "bval"
-<varname>B</varname> .= "additionaldata"
-<varname>C</varname> = "cval"
-<varname>C</varname> =. "test"</screen></para>
- <para>In this example, <varname>B</varname> is now <literal>bvaladditionaldata</literal> and <varname>C</varname> is <literal>testcval</literal>. In contrast to the appending and prepending operators above, no additional space
-is introduced.</para>
- </section>
- <section>
- <title>Conditional metadata set</title>
- <para>OVERRIDES is a <quote>:</quote> separated variable containing the items for which you want to satisfy conditions. So, if you have a variable which is conditional on <quote>arm</quote>, and <quote>arm</quote> is in OVERRIDES, then the <quote>arm</quote>-specific version of the variable is used rather than the non-conditional version. Example:</para>
- <para><screen><varname>OVERRIDES</varname> = "architecture:os:machine"
-<varname>TEST</varname> = "defaultvalue"
-<varname>TEST_os</varname> = "osspecificvalue"
-<varname>TEST_condnotinoverrides</varname> = "othercondvalue"</screen></para>
- <para>In this example, <varname>TEST</varname> would be <literal>osspecificvalue</literal>, due to the condition <quote>os</quote> being in <varname>OVERRIDES</varname>.</para>
- </section>
- <section>
- <title>Conditional appending</title>
- <para>BitBake also supports appending and prepending to variables based on whether something is in OVERRIDES. Example:</para>
- <para><screen><varname>DEPENDS</varname> = "glibc ncurses"
-<varname>OVERRIDES</varname> = "machine:local"
-<varname>DEPENDS_append_machine</varname> = " libmad"</screen></para>
- <para>In this example, <varname>DEPENDS</varname> is set to <literal>glibc ncurses libmad</literal>.</para>
- </section>
- <section>
- <title>Inclusion</title>
- <para>Next, there is the <literal>include</literal> directive, which causes BitBake to parse in whatever file you specify, and insert it at that location, which is not unlike <command>make</command>. However, if the path specified on the <literal>include</literal> line is a relative path, BitBake will locate the first one it can find within <envar>BBPATH</envar>.</para>
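- <para>For example (the file name here is purely illustrative):</para>
- <para><screen>include foo.inc</screen></para>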
- </section>
- <section>
- <title>Requiring Inclusion</title>
- <para>In contrast to the <literal>include</literal> directive, <literal>require</literal> will
-raise a ParseError if the file to be included cannot be found. Otherwise it behaves just like the <literal>
-include</literal> directive.</para>
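- <para>For example (again, the file name is illustrative):</para>
- <para><screen>require foo.inc</screen></para>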
- </section>
- <section>
- <title>Python variable expansion</title>
- <para><screen><varname>DATE</varname> = "${@time.strftime('%Y%m%d',time.gmtime())}"</screen></para>
- <para>This would result in the <varname>DATE</varname> variable containing today's date.</para>
- </section>
- <section>
- <title>Defining executable metadata</title>
- <para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
- <para><screen>do_mytask () {
- echo "Hello, world!"
-}</screen></para>
- <para>This is essentially identical to setting a variable, except that this variable happens to be executable shell code.</para>
- <para><screen>python do_printdate () {
- import time
- print time.strftime('%Y%m%d', time.gmtime())
-}</screen></para>
- <para>This is similar to the previous example, but the function is flagged as python so that BitBake knows it is Python code.</para>
- </section>
- <section>
- <title>Defining python functions into the global python namespace</title>
- <para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
- <para><screen>def get_depends(bb, d):
- if bb.data.getVar('SOMECONDITION', d, True):
- return "dependencywithcond"
- else:
- return "dependency"
-
-<varname>SOMECONDITION</varname> = "1"
-<varname>DEPENDS</varname> = "${@get_depends(bb, d)}"</screen></para>
- <para>This would result in <varname>DEPENDS</varname> containing <literal>dependencywithcond</literal>.</para>
- </section>
- <section>
- <title>Variable Flags</title>
- <para>Variables can have associated flags which provide a way of tagging extra information onto a variable. Several flags are used internally by bitbake but they can be used externally too if needed. The standard operations mentioned above also work on flags.</para>
- <para><screen><varname>VARIABLE</varname>[<varname>SOMEFLAG</varname>] = "value"</screen></para>
- <para>In this example, <varname>VARIABLE</varname> has a flag, <varname>SOMEFLAG</varname> which is set to <literal>value</literal>.</para>
- </section>
- <section>
- <title>Inheritance</title>
- <para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
- <para>The <literal>inherit</literal> directive is a means of specifying what classes of functionality your .bb requires. It is a rudimentary form of inheritance. For example, you can easily abstract out the tasks involved in building a package that uses autoconf and automake, and put that into a bbclass for your packages to make use of. A given bbclass is located by searching for classes/filename.bbclass in <envar>BBPATH</envar>, where filename is what you inherited.</para>
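- <para>For example (this assumes the metadata repository in use provides an <quote>autotools</quote> class):</para>
- <para><screen>inherit autotools</screen></para>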
- </section>
- <section>
- <title>Tasks</title>
- <para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
- <para>In BitBake, each step that needs to be run for a given .bb is known as a task. There is a command <literal>addtask</literal> to add new tasks (the task must be defined as executable metadata and its name must start with <quote>do_</quote>) and describe intertask dependencies.</para>
- <para><screen>python do_printdate () {
- import time
- print time.strftime('%Y%m%d', time.gmtime())
-}
-
-addtask printdate before do_build</screen></para>
- <para>This defines the necessary python function and adds it as a task which is now a dependency of do_build (the default task). If anyone executes the do_build task, that will result in do_printdate being run first.</para>
- </section>
- <section>
- <title>Events</title>
- <para><emphasis>NOTE:</emphasis> This is only supported in .bb and .bbclass files.</para>
- <para>BitBake allows you to install event handlers. Events are triggered at certain points during operation, such as the beginning of operation against a given .bb, the start of a given task, task failure, task success, et cetera. The intent was to make it easy to do things like email notifications on build failure.</para>
- <para><screen>addhandler myclass_eventhandler
-python myclass_eventhandler() {
- from bb.event import getName
- from bb import data
-
- print("The name of the Event is %s" % getName(e))
- print("The file we run for is %s" % data.getVar('FILE', e.data, True))
-}
-</screen></para><para>
-This event handler gets called every time an event is triggered. A global variable <varname>e</varname> is defined; <varname>e</varname>.data contains an instance of bb.data. The getName(<varname>e</varname>)
-method returns the name of the triggered event.</para><para>The above event handler prints the name
-of the event and the content of the <varname>FILE</varname> variable.</para>
- </section>
- <section>
- <title>Variants</title>
- <para>Two Bitbake features exist to facilitate the creation of multiple buildable incarnations from a single recipe file.</para>
- <para>The first is <varname>BBCLASSEXTEND</varname>. This variable is a space separated list of classes used to "extend" the recipe for each variant. As an example, setting <screen>BBCLASSEXTEND = "native"</screen> results in a second incarnation of the current recipe being available. This second incarnation will have the "native" class inherited.</para>
- <para>The second feature is <varname>BBVERSIONS</varname>. This variable allows a single recipe file to build multiple versions of a project, and allows you to specify conditional metadata (using the <varname>OVERRIDES</varname> mechanism) for a single version, or an optionally named range of versions:</para>
- <para><screen>BBVERSIONS = "1.0 2.0 git"
-SRC_URI_git = "git://someurl/somepath.git"</screen></para>
- <para><screen>BBVERSIONS = "1.0.[0-6]:1.0.0+ \
- 1.0.[7-9]:1.0.7+"
-SRC_URI_append_1.0.7+ = "file://some_patch_which_the_new_versions_need.patch;patch=1"</screen></para>
- <para>Note that the name of the range will default to the original version of the recipe, so given OE, a recipe file of foo_1.0.0+.bb will default the name of its versions to 1.0.0+. This is useful, as the range name is not only placed into overrides, it's also made available for the metadata to use in the form of the <varname>BPV</varname> variable, for use in file:// search paths (<varname>FILESPATH</varname>).</para>
- </section>
- </section>
- <section>
- <title>Dependency Handling</title>
- <para>Bitbake 1.7.x onwards works with the metadata at the task level, since this is optimal when dealing with multiple threads of execution. A robust method of specifying task dependencies is therefore needed.</para>
- <section>
- <title>Dependencies internal to the .bb file</title>
- <para>Where the dependencies are internal to a given .bb file, the dependencies are handled by the previously detailed addtask directive.</para>
- </section>
-
- <section>
- <title>DEPENDS</title>
- <para>DEPENDS is taken to specify build time dependencies. The 'deptask' flag for tasks is used to signify the task of each item in DEPENDS which must have completed before that task can be executed.</para>
- <para><screen>do_configure[deptask] = "do_populate_staging"</screen></para>
- <para>means the do_populate_staging task of each item in DEPENDS must have completed before do_configure can execute.</para>
- </section>
- <section>
- <title>RDEPENDS</title>
- <para>RDEPENDS is taken to specify runtime dependencies. The 'rdeptask' flag for tasks is used to signify the task of each item in RDEPENDS which must have completed before that task can be executed.</para>
- <para><screen>do_package_write[rdeptask] = "do_package"</screen></para>
- <para>means the do_package task of each item in RDEPENDS must have completed before do_package_write can execute.</para>
- </section>
- <section>
- <title>Recursive DEPENDS</title>
- <para>These are specified with the 'recdeptask' flag, which is used to signify the task(s) of each item in DEPENDS which must have completed before that task can be executed. It applies recursively, so the DEPENDS of each item in the original DEPENDS must also be met, and so on.</para>
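- <para>As a sketch, following the pattern of the deptask example above (the task names are illustrative):</para>
- <para><screen>do_sometask[recdeptask] = "do_populate_staging"</screen></para>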
- </section>
- <section>
- <title>Recursive RDEPENDS</title>
- <para>These are specified with the 'recrdeptask' flag, which is used to signify the task(s) of each item in RDEPENDS which must have completed before that task can be executed. It applies recursively, so the RDEPENDS of each item in the original RDEPENDS must also be met, and so on. All DEPENDS are processed first as well.</para>
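- <para>Again as a sketch (task names illustrative):</para>
- <para><screen>do_sometask[recrdeptask] = "do_package"</screen></para>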
- </section>
- <section>
- <title>Inter Task</title>
- <para>The 'depends' flag for tasks is a more generic form which allows an interdependency on specific tasks, rather than specifying the data in DEPENDS or RDEPENDS.</para>
- <para><screen>do_patch[depends] = "quilt-native:do_populate_staging"</screen></para>
- <para>means the do_populate_staging task of the target quilt-native must have completed before do_patch can execute.</para>
- </section>
- </section>
-
- <section>
- <title>Parsing</title>
- <section>
- <title>Configuration Files</title>
- <para>The first of the classifications of metadata in BitBake is configuration metadata. This metadata is global, and therefore affects <emphasis>all</emphasis> packages and tasks which are executed.</para>
- <para>Bitbake will first search the current working directory for an optional "conf/bblayers.conf" configuration file. This file is expected to contain a BBLAYERS variable which is a space delimited list of 'layer' directories. For each directory in this list, a "conf/layer.conf" file will be searched for and parsed, with the LAYERDIR variable being set to the directory where the layer was found. The idea is that these files will set up BBPATH and other variables correctly for a given build directory automatically for the user.</para>
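- <para>For example, a minimal "conf/bblayers.conf" might contain (the paths are illustrative):</para>
- <para><screen>BBLAYERS = "/path/to/layer1 /path/to/layer2"</screen></para>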
- <para>Bitbake will then expect to find 'conf/bitbake.conf' somewhere in the user specified <envar>BBPATH</envar>. That configuration file generally has include directives to pull in any other metadata (files specific to the architecture, the machine, the <emphasis>local</emphasis> environment, and so on).</para>
- <para>Only variable definitions and include directives are allowed in .conf files.</para>
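- <para>For example, a .conf file might contain nothing but lines such as (the values are illustrative):</para>
- <para><screen>BBFILES = "${HOME}/bb/*.bb"
-include conf/site.conf</screen></para>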
- </section>
- <section>
- <title>Classes</title>
- <para>BitBake classes are our rudimentary inheritance mechanism. As briefly mentioned in the metadata introduction, they're parsed when an <literal>inherit</literal> directive is encountered, and they are located in classes/ relative to the dirs in <envar>BBPATH</envar>.</para>
- </section>
- <section>
- <title>.bb Files</title>
- <para>A BitBake (.bb) file is a logical unit of tasks to be executed. Normally this is a package to be built. Inter-.bb dependencies are obeyed. The files themselves are located via the <varname>BBFILES</varname> variable, which is set to a space separated list of .bb files, and does handle wildcards.</para>
- </section>
- </section>
- </chapter>
-
- <chapter>
- <title>File Download support</title>
- <section>
- <title>Overview</title>
- <para>BitBake provides support for downloading files; this procedure is called fetching. The SRC_URI variable is normally used to tell BitBake which files to fetch. The next sections describe the available fetchers and the options they have. Each fetcher honors a set of variables and
-a set of per-URI parameters, separated by a <quote>;</quote> and consisting of a key and a value. The semantics of the variables and parameters are defined by the fetcher. BitBake tries to keep these semantics consistent between the different fetchers.
- </para>
- </section>
-
- <section>
- <title>Local File Fetcher</title>
- <para>The URN for the Local File Fetcher is <emphasis>file</emphasis>. The filename can be either absolute or relative. If the filename is relative, <varname>FILESPATH</varname> and <varname>FILESDIR</varname> will be used to find the appropriate relative file, depending on the <varname>OVERRIDES</varname>. Single files and complete directories can be specified.
-<screen><varname>SRC_URI</varname>= "file://relativefile.patch"
-<varname>SRC_URI</varname>= "file://relativefile.patch;this=ignored"
-<varname>SRC_URI</varname>= "file:///Users/ich/very_important_software"
-</screen>
- </para>
- </section>
-
- <section>
- <title>CVS File Fetcher</title>
- <para>The URN for the CVS Fetcher is <emphasis>cvs</emphasis>. This Fetcher honors the variables <varname>DL_DIR</varname>, <varname>SRCDATE</varname>, <varname>FETCHCOMMAND_cvs</varname>, <varname>UPDATECOMMAND_cvs</varname>. <varname>DL_DIR</varname> specifies where a temporary checkout is saved, <varname>SRCDATE</varname> specifies which date to use when doing the fetching (the special value of "now" will cause the checkout to be updated on every build), <varname>FETCHCOMMAND</varname> and <varname>UPDATECOMMAND</varname> specify which executables should be used when doing the CVS checkout or update.
- </para>
- <para>The supported Parameters are <varname>module</varname>, <varname>tag</varname>, <varname>date</varname>, <varname>method</varname>, <varname>localdir</varname>, <varname>rsh</varname> and <varname>scmdata</varname>. The <varname>module</varname> specifies which module to check out, and the <varname>tag</varname> describes which CVS TAG should be used for the checkout. By default the TAG is empty. A <varname>date</varname> can be specified to override the SRCDATE of the configuration and check out a specific date. The special value of "now" will cause the checkout to be updated on every build. <varname>method</varname> is by default <emphasis>pserver</emphasis>; if <emphasis>ext</emphasis> is used, the <varname>rsh</varname> parameter will be evaluated and <varname>CVS_RSH</varname> will be set. Finally, <varname>localdir</varname> is used to check out into a special directory relative to <varname>CVSDIR</varname>. If <varname>scmdata</varname> is set to <quote>keep</quote>, the <quote>CVS</quote> directories will be available during compile-time.
-<screen><varname>SRC_URI</varname> = "cvs://CVSROOT;module=mymodule;tag=some-version;method=ext"
-<varname>SRC_URI</varname> = "cvs://CVSROOT;module=mymodule;date=20060126;localdir=usethat"
-</screen>
- </para>
- </section>
-
- <section>
- <title>HTTP/FTP Fetcher</title>
- <para>The URNs for the HTTP/FTP Fetcher are <emphasis>http</emphasis>, <emphasis>https</emphasis> and <emphasis>ftp</emphasis>. This Fetcher honors the variables <varname>DL_DIR</varname>, <varname>FETCHCOMMAND_wget</varname>, <varname>PREMIRRORS</varname>, <varname>MIRRORS</varname>. The <varname>DL_DIR</varname> defines where to store the fetched file, and <varname>FETCHCOMMAND</varname> contains the command used for fetching. <quote>${URI}</quote> and <quote>${FILES}</quote> will be replaced by the URI and the basename of the file to be fetched. <varname>PREMIRRORS</varname>
-will be tried first when fetching a file; if that fails, the actual URI will be tried, and finally all <varname>MIRRORS</varname> will be tried.
- </para>
- <para>The only supported Parameter is <varname>md5sum</varname>. After a fetch the <varname>md5sum</varname> of the file will be calculated and the two sums will be compared.
- </para>
- <para><screen><varname>SRC_URI</varname> = "http://oe.handhelds.org/not_there.aac;md5sum=12343"
-<varname>SRC_URI</varname> = "ftp://oe.handhelds.org/not_there_as_well.aac;md5sum=1234"
-<varname>SRC_URI</varname> = "ftp://you@oe.handheld.sorg/home/you/secret.plan;md5sum=1234"
-</screen></para>
- </section>
-
- <section>
- <title>SVK Fetcher</title>
- <para>
- <emphasis>Currently NOT supported</emphasis>
- </para>
- </section>
-
- <section>
- <title>SVN Fetcher</title>
- <para>The URN for the SVN Fetcher is <emphasis>svn</emphasis>.
- </para>
- <para>This Fetcher honors the variables <varname>FETCHCOMMAND_svn</varname>, <varname>DL_DIR</varname>, <varname>SRCDATE</varname>. <varname>FETCHCOMMAND</varname> contains the subversion command, <varname>DL_DIR</varname> is the directory where tarballs will be saved, <varname>SRCDATE</varname> specifies which date to use when doing the fetching (the special value of "now" will cause the checkout to be updated on every build).
- </para>
- <para>The supported Parameters are <varname>proto</varname>, <varname>rev</varname> and <varname>scmdata</varname>. <varname>proto</varname> is the subversion protocol, <varname>rev</varname> is the subversion revision. If <varname>scmdata</varname> is set to <quote>keep</quote>, the <quote>.svn</quote> directories will be available during compile-time.
- </para>
- <para><screen><varname>SRC_URI</varname> = "svn://svn.oe.handhelds.org/svn;module=vip;proto=http;rev=667"
-<varname>SRC_URI</varname> = "svn://svn.oe.handhelds.org/svn/;module=opie;proto=svn+ssh;date=20060126"
-</screen></para>
- </section>
-
- <section>
- <title>GIT Fetcher</title>
- <para>The URN for the GIT Fetcher is <emphasis>git</emphasis>.
- </para>
- <para>The Variables <varname>DL_DIR</varname> and <varname>GITDIR</varname> are used. <varname>DL_DIR</varname> will be used to store the checked-out version. <varname>GITDIR</varname> will be used as the base directory where the git tree is cloned to.
- </para>
- <para>The Parameters are <emphasis>tag</emphasis>, <emphasis>protocol</emphasis> and <emphasis>scmdata</emphasis>. <emphasis>tag</emphasis> is a git tag, the default is <quote>master</quote>. <emphasis>protocol</emphasis> is the git protocol to use and defaults to <quote>rsync</quote>. If <emphasis>scmdata</emphasis> is set to <quote>keep</quote>, the <quote>.git</quote> directory will be available during compile-time.
- </para>
- <para><screen><varname>SRC_URI</varname> = "git://git.oe.handhelds.org/git/vip.git;tag=version-1"
-<varname>SRC_URI</varname> = "git://git.oe.handhelds.org/git/vip.git;protocol=http"
- </screen></para>
- </section>
-
- </chapter>
-
-
- <chapter>
- <title>The bitbake command</title>
- <section>
- <title>Introduction</title>
- <para>bitbake is the primary command in the system. It facilitates executing tasks in a single .bb file, or executing a given task on a set of multiple .bb files, accounting for interdependencies amongst them.</para>
- </section>
- <section>
- <title>Usage and Syntax</title>
- <para>
- <screen><prompt>$ </prompt>bitbake --help
-usage: bitbake [options] [package ...]
-
-Executes the specified task (default is 'build') for a given set of BitBake files.
-It expects that BBFILES is defined, which is a space separated list of files to
-be executed. BBFILES does support wildcards.
-Default BBFILES are the .bb files in the current directory.
-
-options:
- --version show program's version number and exit
- -h, --help show this help message and exit
- -b BUILDFILE, --buildfile=BUILDFILE
- execute the task against this .bb file, rather than a
- package from BBFILES.
- -k, --continue continue as much as possible after an error. While the
- target that failed, and those that depend on it,
- cannot be remade, the other dependencies of these
- targets can be processed all the same.
- -f, --force force run of specified cmd, regardless of stamp status
- -i, --interactive drop into the interactive mode also called the BitBake
- shell.
- -c CMD, --cmd=CMD Specify task to execute. Note that this only executes
- the specified task for the providee and the packages
- it depends on, i.e. 'compile' does not implicitly call
- stage for the dependencies (IOW: use only if you know
- what you are doing). Depending on the base.bbclass a
- listtasks task is defined and will show available
- tasks
- -r FILE, --read=FILE read the specified file before bitbake.conf
- -v, --verbose output more chit-chat to the terminal
- -D, --debug Increase the debug level. You can specify this more
- than once.
- -n, --dry-run don't execute, just go through the motions
- -p, --parse-only quit after parsing the BB files (developers only)
- -d, --disable-psyco disable using the psyco just-in-time compiler (not
- recommended)
- -s, --show-versions show current and preferred versions of all packages
- -e, --environment show the global or per-package environment (this is
- what used to be bbread)
- -g, --graphviz emit the dependency trees of the specified packages in
- the dot syntax
- -I IGNORED_DOT_DEPS, --ignore-deps=IGNORED_DOT_DEPS
- Stop processing at the given list of dependencies when
- generating dependency graphs. This can help to make
- the graph more appealing
- -l DEBUG_DOMAINS, --log-domains=DEBUG_DOMAINS
- Show debug logging for the specified logging domains
- -P, --profile profile the command and print a report
-
-
-</screen>
- </para>
- <para>
- <example>
- <title>Executing a task against a single .bb</title>
- <para>Executing tasks for a single file is relatively simple. You specify the file in question, and bitbake parses it and executes the specified task (or <quote>build</quote> by default). It obeys intertask dependencies when doing so.</para>
- <para><quote>clean</quote> task:</para>
- <para><screen><prompt>$ </prompt>bitbake -b blah_1.0.bb -c clean</screen></para>
- <para><quote>build</quote> task:</para>
- <para><screen><prompt>$ </prompt>bitbake -b blah_1.0.bb</screen></para>
- </example>
- </para>
- <para>
- <example>
- <title>Executing tasks against a set of .bb files</title>
- <para>There are a number of additional complexities introduced when one wants to manage multiple .bb files. Clearly there needs to be a way to tell bitbake what files are available, and of those, which we want to execute at this time. There also needs to be a way for each .bb to express its dependencies, both for build time and runtime. There must be a way for the user to express their preferences when multiple .bb's provide the same functionality, or when there are multiple versions of a .bb.</para>
- <para>The next section, Metadata, outlines how one goes about specifying such things.</para>
- <para>Note that the bitbake command, when not using --buildfile, accepts a <varname>PROVIDER</varname>, not a filename or anything else. By default, a .bb generally PROVIDES its packagename, packagename-version, and packagename-version-revision.</para>
- <screen><prompt>$ </prompt>bitbake blah</screen>
- <screen><prompt>$ </prompt>bitbake blah-1.0</screen>
- <screen><prompt>$ </prompt>bitbake blah-1.0-r0</screen>
- <screen><prompt>$ </prompt>bitbake -c clean blah</screen>
- <screen><prompt>$ </prompt>bitbake virtual/whatever</screen>
- <screen><prompt>$ </prompt>bitbake -c clean virtual/whatever</screen>
- </example>
- <example>
- <title>Generating dependency graphs</title>
- <para>BitBake is able to generate dependency graphs using the dot syntax. These graphs can be converted
-to images using the <application>dot</application> application from <ulink url="http://www.graphviz.org">graphviz</ulink>.
-Two files will be written into the current working directory: <emphasis>depends.dot</emphasis>, containing dependency information at the package level, and <emphasis>task-depends.dot</emphasis>, containing a breakdown of the dependencies at the task level. Common dependencies can be omitted from the graph with <prompt>-I depend</prompt>; this can lead to more readable graphs. For example, <varname>DEPENDS</varname> from inherited classes, e.g. base.bbclass, can be removed from the graph this way.</para>
- <screen><prompt>$ </prompt>bitbake -g blah</screen>
- <screen><prompt>$ </prompt>bitbake -g -I virtual/whatever -I bloom blah</screen>
- </example>
- </para>
- </section>
- <section>
- <title>Special variables</title>
- <para>Certain variables affect bitbake operation:</para>
- <section>
- <title><varname>BB_NUMBER_THREADS</varname></title>
- <para> The number of threads bitbake should run at once (default: 1).</para>
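- <para>For example, to allow up to four tasks to run at once:</para>
- <para><screen>BB_NUMBER_THREADS = "4"</screen></para>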
- </section>
- </section>
- <section>
- <title>Metadata</title>
- <para>As you may have seen in the usage information, or in the information about .bb files, the BBFILES variable is how the bitbake tool locates its files. This variable is a space separated list of files that are available, and supports wildcards.
- <example>
- <title>Setting BBFILES</title>
- <programlisting><varname>BBFILES</varname> = "/path/to/bbfiles/*.bb"</programlisting>
- </example></para>
- <para>With regard to dependencies, it expects the .bb to define a <varname>DEPENDS</varname> variable, which contains a space separated list of <quote>package names</quote>, which themselves correspond to the <varname>PN</varname> variable of each .bb. The <varname>PN</varname> variable is, in general, set by default to a component of the .bb filename.</para>
- <example>
- <title>Depending on another .bb</title>
- <para>a.bb:
- <screen>PN = "package-a"
-DEPENDS += "package-b"</screen>
- </para>
- <para>b.bb:
- <screen>PN = "package-b"</screen>
- </para>
- </example>
- <example>
- <title>Using PROVIDES</title>
- <para>This example shows the usage of the PROVIDES variable, which allows a given .bb to specify what functionality it provides.</para>
- <para>package1.bb:
- <screen>PROVIDES += "virtual/package"</screen>
- </para>
- <para>package2.bb:
- <screen>DEPENDS += "virtual/package"</screen>
- </para>
- <para>package3.bb:
- <screen>PROVIDES += "virtual/package"</screen>
- </para>
- <para>As you can see, here there are two different .bb's that provide the same functionality (virtual/package). Clearly, there needs to be a way for the person running bitbake to control which of those providers gets used. There is, indeed, such a way.</para>
- <para>The following would go into a .conf file, to select package1:
- <screen>PREFERRED_PROVIDER_virtual/package = "package1"</screen>
- </para>
- </example>
- <example>
- <title>Specifying version preference</title>
- <para>When there are multiple <quote>versions</quote> of a given package, bitbake defaults to selecting the most recent version, unless otherwise specified. If the .bb in question has a <varname>DEFAULT_PREFERENCE</varname> set lower than the other .bb's (default is 0), then it will not be selected. This allows the person or persons maintaining the repository of .bb files to specify their preferences for the default selected version. In addition, the user can specify their preferences with regard to version.</para>
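- <para>For example, a .bb that should not be selected by default can set (the value is illustrative; any preference lower than that of the alternatives works):</para>
- <para><screen>DEFAULT_PREFERENCE = "-1"</screen></para>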
- <para>If the first .bb is named <filename>a_1.1.bb</filename>, then the <varname>PN</varname> variable will be set to <quote>a</quote>, and the <varname>PV</varname> variable will be set to 1.1.</para>
- <para>If we then have an <filename>a_1.2.bb</filename>, bitbake will choose 1.2 by default. However, if we define the following variable in a .conf that bitbake parses, we can change that.
- <screen>PREFERRED_VERSION_a = "1.1"</screen>
- </para>
- </example>
- <example>
- <title>Using <quote>bbfile collections</quote></title>
- <para>bbfile collections exist to allow the user to have multiple repositories of bbfiles that contain the same exact package. For example, one could easily use them to make one's own local copy of an upstream repository, but with custom modifications that one does not want upstream. Usage:</para>
- <screen>BBFILES = "/stuff/openembedded/*/*.bb /stuff/openembedded.modified/*/*.bb"
-BBFILE_COLLECTIONS = "upstream local"
-BBFILE_PATTERN_upstream = "^/stuff/openembedded/"
-BBFILE_PATTERN_local = "^/stuff/openembedded.modified/"
-BBFILE_PRIORITY_upstream = "5"
-BBFILE_PRIORITY_local = "10"</screen>
- </example>
- </section>
- </chapter>
-</book>
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
deleted file mode 100644
index 6917ec378a..0000000000
--- a/bitbake/lib/bb/COW.py
+++ /dev/null
@@ -1,323 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
-#
-# Copyright (C) 2006 Tim Amsell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Please Note:
-# Be careful when using mutable types (i.e. dicts and lists) - operations involving these are SLOW.
-# Assign a file to __warn__ to get warnings about slow operations.
-#
-
-from __future__ import print_function
-import copy
-import types
-ImmutableTypes = (
- types.NoneType,
- bool,
- complex,
- float,
- int,
- long,
- tuple,
- frozenset,
- basestring
-)
-
-MUTABLE = "__mutable__"
-
-class COWMeta(type):
- pass
-
-class COWDictMeta(COWMeta):
- __warn__ = False
- __hasmutable__ = False
- __marker__ = tuple()
-
- def __str__(cls):
- # FIXME: I have magic numbers!
- return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
- __repr__ = __str__
-
- def cow(cls):
- class C(cls):
- __count__ = cls.__count__ + 1
- return C
- copy = cow
- __call__ = cow
-
- def __setitem__(cls, key, value):
- if not isinstance(value, ImmutableTypes):
- if not isinstance(value, COWMeta):
- cls.__hasmutable__ = True
- key += MUTABLE
- setattr(cls, key, value)
-
- def __getmutable__(cls, key, readonly=False):
- nkey = key + MUTABLE
- try:
- return cls.__dict__[nkey]
- except KeyError:
- pass
-
- value = getattr(cls, nkey)
- if readonly:
- return value
-
- if not cls.__warn__ is False and not isinstance(value, COWMeta):
- print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
- try:
- value = value.copy()
- except AttributeError as e:
- value = copy.copy(value)
- setattr(cls, nkey, value)
- return value
-
- __getmarker__ = []
- def __getreadonly__(cls, key, default=__getmarker__):
- """\
- Get a value (even if mutable) which you promise not to change.
- """
- return cls.__getitem__(key, default, True)
-
- def __getitem__(cls, key, default=__getmarker__, readonly=False):
- try:
- try:
- value = getattr(cls, key)
- except AttributeError:
- value = cls.__getmutable__(key, readonly)
-
- # This is for values which have been deleted
- if value is cls.__marker__:
- raise AttributeError("key %s does not exist." % key)
-
- return value
- except AttributeError as e:
- if not default is cls.__getmarker__:
- return default
-
- raise KeyError(str(e))
-
- def __delitem__(cls, key):
- cls.__setitem__(key, cls.__marker__)
-
- def __revertitem__(cls, key):
- if not cls.__dict__.has_key(key):
- key += MUTABLE
- delattr(cls, key)
-
- def __contains__(cls, key):
- return cls.has_key(key)
-
- def has_key(cls, key):
- value = cls.__getreadonly__(key, cls.__marker__)
- if value is cls.__marker__:
- return False
- return True
-
- def iter(cls, type, readonly=False):
- for key in dir(cls):
- if key.startswith("__"):
- continue
-
- if key.endswith(MUTABLE):
- key = key[:-len(MUTABLE)]
-
- if type == "keys":
- yield key
-
- try:
- if readonly:
- value = cls.__getreadonly__(key)
- else:
- value = cls[key]
- except KeyError:
- continue
-
- if type == "values":
- yield value
- if type == "items":
- yield (key, value)
- raise StopIteration()
-
- def iterkeys(cls):
- return cls.iter("keys")
- def itervalues(cls, readonly=False):
- if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
- print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
- return cls.iter("values", readonly)
- def iteritems(cls, readonly=False):
- if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
- print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
- return cls.iter("items", readonly)
-
-class COWSetMeta(COWDictMeta):
- def __str__(cls):
- # FIXME: I have magic numbers!
- return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
- __repr__ = __str__
-
- def cow(cls):
- class C(cls):
- __count__ = cls.__count__ + 1
- return C
-
- def add(cls, value):
- COWDictMeta.__setitem__(cls, repr(hash(value)), value)
-
- def remove(cls, value):
- COWDictMeta.__delitem__(cls, repr(hash(value)))
-
- def __in__(cls, value):
- # has_key is an unbound method on the metaclass, so cls must be passed explicitly
- return COWDictMeta.has_key(cls, repr(hash(value)))
-
- def iterkeys(cls):
- raise TypeError("sets don't have keys")
-
- def iteritems(cls):
- raise TypeError("sets don't have 'items'")
-
-# These are the actual classes you use!
-class COWDictBase(object):
- __metaclass__ = COWDictMeta
- __count__ = 0
-
-class COWSetBase(object):
- __metaclass__ = COWSetMeta
- __count__ = 0
-
-if __name__ == "__main__":
- import sys
- COWDictBase.__warn__ = sys.stderr
- a = COWDictBase()
- print("a", a)
-
- a['a'] = 'a'
- a['b'] = 'b'
- a['dict'] = {}
-
- b = a.copy()
- print("b", b)
- b['c'] = 'b'
-
- print()
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems():
- print(x)
- print()
-
- b['dict']['a'] = 'b'
- b['a'] = 'c'
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems():
- print(x)
- print()
-
- try:
- b['dict2']
- except KeyError as e:
- print("Okay!")
-
- a['set'] = COWSetBase()
- a['set'].add("o1")
- a['set'].add("o1")
- a['set'].add("o2")
-
- print("a", a)
- for x in a['set'].itervalues():
- print(x)
- print("--")
- print("b", b)
- for x in b['set'].itervalues():
- print(x)
- print()
-
- b['set'].add('o3')
-
- print("a", a)
- for x in a['set'].itervalues():
- print(x)
- print("--")
- print("b", b)
- for x in b['set'].itervalues():
- print(x)
- print()
-
- a['set2'] = set()
- a['set2'].add("o1")
- a['set2'].add("o1")
- a['set2'].add("o2")
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- del b['b']
- try:
- print(b['b'])
- except KeyError:
- print("Yay! deleted key raises error")
-
- if b.has_key('b'):
- print("Boo!")
- else:
- print("Yay - has_key with delete works!")
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- b.__revertitem__('b')
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- b.__revertitem__('dict')
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
deleted file mode 100644
index 4c7afc9c21..0000000000
--- a/bitbake/lib/bb/__init__.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Build System Python Library
-#
-# Copyright (C) 2003 Holger Schurig
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# Based on Gentoo's portage.py.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-__version__ = "1.11.0"
-
-import sys
-if sys.version_info < (2, 6, 0):
- raise RuntimeError("Sorry, python 2.6.0 or later is required for this version of bitbake")
-
-import os
-import logging
-import traceback
-
-class NullHandler(logging.Handler):
- def emit(self, record):
- pass
-
-Logger = logging.getLoggerClass()
-class BBLogger(Logger):
- def __init__(self, name):
- if name.split(".")[0] == "BitBake":
- self.debug = self.bbdebug
- Logger.__init__(self, name)
-
- def bbdebug(self, level, msg, *args, **kwargs):
- return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)
-
- def plain(self, msg, *args, **kwargs):
- return self.log(logging.INFO + 1, msg, *args, **kwargs)
-
- def verbose(self, msg, *args, **kwargs):
- return self.log(logging.INFO - 1, msg, *args, **kwargs)
-
- def exception(self, msg, *args, **kwargs):
- return self.critical("%s\n%s" % (msg, traceback.format_exc()), *args, **kwargs)
-
-logging.raiseExceptions = False
-logging.setLoggerClass(BBLogger)
-
-logger = logging.getLogger("BitBake")
-logger.addHandler(NullHandler())
-logger.setLevel(logging.INFO)
-
-# This has to be imported after the setLoggerClass, as the import of bb.msg
-# can result in construction of the various loggers.
-import bb.msg
-
-if "BBDEBUG" in os.environ:
- level = int(os.environ["BBDEBUG"])
- if level:
- bb.msg.set_debug_level(level)
-
-if True or os.environ.get("BBFETCH2"):
- from bb import fetch2 as fetch
- sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
-
-# Messaging convenience functions
-def plain(*args):
- logger.plain(''.join(args))
-
-def debug(lvl, *args):
- logger.debug(lvl, ''.join(args))
-
-def note(*args):
- logger.info(''.join(args))
-
-def warn(*args):
- logger.warn(''.join(args))
-
-def error(*args):
- logger.error(''.join(args))
-
-def fatal(*args):
- logger.critical(''.join(args))
- sys.exit(1)
-
-
-def deprecated(func, name = None, advice = ""):
- """This is a decorator which can be used to mark functions
- as deprecated. It will result in a warning being emmitted
- when the function is used."""
- import warnings
-
- if advice:
- advice = ": %s" % advice
- if name is None:
- name = func.__name__
-
- def newFunc(*args, **kwargs):
- warnings.warn("Call to deprecated function %s%s." % (name,
- advice),
- category = PendingDeprecationWarning,
- stacklevel = 2)
- return func(*args, **kwargs)
- newFunc.__name__ = func.__name__
- newFunc.__doc__ = func.__doc__
- newFunc.__dict__.update(func.__dict__)
- return newFunc
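-
-# Example usage of the decorator (illustrative; 'some_func' is a hypothetical function):
-#   some_func = deprecated(some_func, advice = "use the replacement API instead")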
-
-# For compatibility
-def deprecate_import(current, modulename, fromlist, renames = None):
- """Import objects from one module into another, wrapping them with a DeprecationWarning"""
- import sys
-
- module = __import__(modulename, fromlist = fromlist)
- for position, objname in enumerate(fromlist):
- obj = getattr(module, objname)
- newobj = deprecated(obj, "{0}.{1}".format(current, objname),
- "Please use {0}.{1} instead".format(modulename, objname))
- if renames:
- newname = renames[position]
- else:
- newname = objname
-
- setattr(sys.modules[current], newname, newobj)
-
-deprecate_import(__name__, "bb.fetch", ("MalformedUrl", "encodeurl", "decodeurl"))
-deprecate_import(__name__, "bb.utils", ("mkdirhier", "movefile", "copyfile", "which"))
-deprecate_import(__name__, "bb.utils", ["vercmp_string"], ["vercmp"])
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
deleted file mode 100644
index a7664bd36d..0000000000
--- a/bitbake/lib/bb/build.py
+++ /dev/null
@@ -1,472 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake 'Build' implementation
-#
-# Core code for function execution and task handling in the
-# BitBake build tools.
-#
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# Based on Gentoo's portage.py.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-import bb.msg
-import bb.process
-from contextlib import nested
-from bb import data, event, mkdirhier, utils
-
-bblogger = logging.getLogger('BitBake')
-logger = logging.getLogger('BitBake.Build')
-
-NULL = open(os.devnull, 'r+')
-
-
-# When we execute a python function we'd like certain things
-# in all namespaces, hence we add them to __builtins__
-# If we do not do this and use the exec globals, they will
-# not be available to subfunctions.
-__builtins__['bb'] = bb
-__builtins__['os'] = os
-
-class FuncFailed(Exception):
- def __init__(self, name = None, logfile = None):
- self.logfile = logfile
- self.name = name
- if name:
- self.msg = "Function '%s' failed" % name
- else:
- self.msg = "Function failed"
-
- def __str__(self):
- if self.logfile and os.path.exists(self.logfile):
- msg = ("%s (see %s for further information)" %
- (self.msg, self.logfile))
- else:
- msg = self.msg
- return msg
-
-class TaskBase(event.Event):
- """Base class for task events"""
-
- def __init__(self, t, d ):
- self._task = t
- self._package = bb.data.getVar("PF", d, 1)
- event.Event.__init__(self)
- self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])
-
- def getTask(self):
- return self._task
-
- def setTask(self, task):
- self._task = task
-
- task = property(getTask, setTask, None, "task property")
-
-class TaskStarted(TaskBase):
- """Task execution started"""
-
-class TaskSucceeded(TaskBase):
- """Task execution completed"""
-
-class TaskFailed(TaskBase):
- """Task execution failed"""
-
- def __init__(self, task, logfile, metadata):
- self.logfile = logfile
- super(TaskFailed, self).__init__(task, metadata)
-
-class TaskInvalid(TaskBase):
-
- def __init__(self, task, metadata):
- super(TaskInvalid, self).__init__(task, metadata)
- self._message = "No such task '%s'" % task
-
-
-class LogTee(object):
- def __init__(self, logger, outfile):
- self.outfile = outfile
- self.logger = logger
- self.name = self.outfile.name
-
- def write(self, string):
- self.logger.plain(string)
- self.outfile.write(string)
-
- def __enter__(self):
- self.outfile.__enter__()
- return self
-
- def __exit__(self, *excinfo):
- self.outfile.__exit__(*excinfo)
-
- def __repr__(self):
- return '<LogTee {0}>'.format(self.name)
-
-
-def exec_func(func, d, dirs = None):
- """Execute an BB 'function'"""
-
- body = data.getVar(func, d)
- if not body:
- if body is None:
- logger.warn("Function %s doesn't exist", func)
- return
-
- flags = data.getVarFlags(func, d)
- cleandirs = flags.get('cleandirs')
- if cleandirs:
- for cdir in data.expand(cleandirs, d).split():
- bb.utils.remove(cdir, True)
-
- if dirs is None:
- dirs = flags.get('dirs')
- if dirs:
- dirs = data.expand(dirs, d).split()
-
- if dirs:
- for adir in dirs:
- bb.utils.mkdirhier(adir)
- adir = dirs[-1]
- else:
- adir = data.getVar('B', d, 1)
- if not os.path.exists(adir):
- adir = None
-
- ispython = flags.get('python')
- if flags.get('fakeroot') and not flags.get('task'):
- bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)
-
- lockflag = flags.get('lockfiles')
- if lockflag:
- lockfiles = [data.expand(f, d) for f in lockflag.split()]
- else:
- lockfiles = None
-
- tempdir = data.getVar('T', d, 1)
- runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))
-
- with bb.utils.fileslocked(lockfiles):
- if ispython:
- exec_func_python(func, d, runfile, cwd=adir)
- else:
- exec_func_shell(func, d, runfile, cwd=adir)
-
-_functionfmt = """
-def {function}(d):
-{body}
-
-{function}(d)
-"""
-logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
-def exec_func_python(func, d, runfile, cwd=None):
- """Execute a python BB 'function'"""
-
- bbfile = d.getVar('FILE', True)
- try:
- olddir = os.getcwd()
- except OSError:
- olddir = None
- code = _functionfmt.format(function=func, body=d.getVar(func, True))
- bb.utils.mkdirhier(os.path.dirname(runfile))
- with open(runfile, 'w') as script:
- script.write(code)
-
- if cwd:
- os.chdir(cwd)
-
- try:
- comp = utils.better_compile(code, func, bbfile)
- utils.better_exec(comp, {"d": d}, code, bbfile)
- except:
- if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
- raise
-
- raise FuncFailed(func, None)
- finally:
- if olddir:
- os.chdir(olddir)
-
-def exec_func_shell(function, d, runfile, cwd=None):
- """Execute a shell function from the metadata
-
- Note on directory behavior. The 'dirs' varflag should contain a list
- of the directories you need created prior to execution. The last
- item in the list is where we will chdir/cd to.
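-
- For example (an illustrative varflag setting, not taken from this file):
- do_compile[dirs] = "${S} ${B}"
- would create ${S} and ${B}, then execute the function with ${B} as the
- current directory.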
- """
-
- # Don't let the emitted shell script override PWD
- d.delVarFlag('PWD', 'export')
-
- with open(runfile, 'w') as script:
- script.write('#!/bin/sh -e\n')
- if logger.isEnabledFor(logging.DEBUG):
- script.write("set -x\n")
- data.emit_func(function, script, d)
-
- script.write("%s\n" % function)
- os.fchmod(script.fileno(), 0775)
-
- env = {
- 'PATH': d.getVar('PATH', True),
- 'LC_ALL': 'C',
- }
-
- cmd = runfile
-
- if logger.isEnabledFor(logging.DEBUG):
- logfile = LogTee(logger, sys.stdout)
- else:
- logfile = sys.stdout
-
- try:
- bb.process.run(cmd, env=env, cwd=cwd, shell=False, stdin=NULL,
- log=logfile)
- except bb.process.CmdError:
- logfn = d.getVar('BB_LOGFILE', True)
- raise FuncFailed(function, logfn)
-
-def _task_data(fn, task, d):
- localdata = data.createCopy(d)
- localdata.setVar('BB_FILENAME', fn)
- localdata.setVar('BB_CURRENTTASK', task[3:])
- localdata.setVar('OVERRIDES', 'task-%s:%s' %
- (task[3:], d.getVar('OVERRIDES', False)))
- localdata.finalize()
- data.expandKeys(localdata)
- return localdata
-
-def _exec_task(fn, task, d, quieterr):
- """Execute a BB 'task'
-
- Execution of a task involves a bit more setup than executing a function,
- running it with its own local metadata, and with some useful variables set.
- """
- if not data.getVarFlag(task, 'task', d):
- event.fire(TaskInvalid(task, d), d)
- logger.error("No such task: %s" % task)
- return 1
-
- logger.debug(1, "Executing task %s", task)
-
- localdata = _task_data(fn, task, d)
- tempdir = localdata.getVar('T', True)
- if not tempdir:
- bb.fatal("T variable not set, unable to build")
-
- bb.utils.mkdirhier(tempdir)
- loglink = os.path.join(tempdir, 'log.{0}'.format(task))
- logfn = os.path.join(tempdir, 'log.{0}.{1}'.format(task, os.getpid()))
- if loglink:
- bb.utils.remove(loglink)
-
- try:
- os.symlink(logfn, loglink)
- except OSError:
- pass
-
- prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
- postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
-
- # Handle logfiles
- si = file('/dev/null', 'r')
- try:
- logfile = file(logfn, 'w')
- except OSError:
- logger.exception("Opening log file '%s'", logfn)
- pass
-
-    # Dup the existing fds so we don't lose them
- osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
- oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
- ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
-
- # Replace those fds with our own
- os.dup2(si.fileno(), osi[1])
- os.dup2(logfile.fileno(), oso[1])
- os.dup2(logfile.fileno(), ose[1])
-
- # Ensure python logging goes to the logfile
- handler = logging.StreamHandler(logfile)
- handler.setFormatter(logformatter)
- bblogger.addHandler(handler)
-
- localdata.setVar('BB_LOGFILE', logfn)
-
- event.fire(TaskStarted(task, localdata), localdata)
- try:
- for func in (prefuncs or '').split():
- exec_func(func, localdata)
- exec_func(task, localdata)
- for func in (postfuncs or '').split():
- exec_func(func, localdata)
- except FuncFailed as exc:
- if not quieterr:
- logger.error(str(exc))
- event.fire(TaskFailed(task, logfn, localdata), localdata)
- return 1
- finally:
- sys.stdout.flush()
- sys.stderr.flush()
-
- bblogger.removeHandler(handler)
-
- # Restore the backup fds
- os.dup2(osi[0], osi[1])
- os.dup2(oso[0], oso[1])
- os.dup2(ose[0], ose[1])
-
- # Close the backup fds
- os.close(osi[0])
- os.close(oso[0])
- os.close(ose[0])
- si.close()
-
- logfile.close()
- if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
-        logger.debug(2, "Zero-size log file %s, removing", logfn)
- bb.utils.remove(logfn)
- bb.utils.remove(loglink)
- event.fire(TaskSucceeded(task, localdata), localdata)
-
- if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
- make_stamp(task, localdata)
-
- return 0
-
-def exec_task(fn, task, d):
- try:
- quieterr = False
- if d.getVarFlag(task, "quieterrors") is not None:
- quieterr = True
-
- return _exec_task(fn, task, d, quieterr)
- except Exception:
- from traceback import format_exc
- if not quieterr:
- logger.error("Build of %s failed" % (task))
- logger.error(format_exc())
- failedevent = TaskFailed(task, None, d)
- event.fire(failedevent, d)
- return 1
-
-def stamp_internal(taskname, d, file_name):
- """
- Internal stamp helper function
- Makes sure the stamp directory exists
- Returns the stamp path+filename
-
- In the bitbake core, d can be a CacheData and file_name will be set.
-    When called in task context, d will be a data store and file_name will not be set.
- """
- taskflagname = taskname
- if taskname.endswith("_setscene") and taskname != "do_setscene":
- taskflagname = taskname.replace("_setscene", "")
-
- if file_name:
- stamp = d.stamp[file_name]
- extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
- else:
- stamp = d.getVar('STAMP', True)
- file_name = d.getVar('BB_FILENAME', True)
- extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
-
- if not stamp:
- return
-
- stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
-
- bb.utils.mkdirhier(os.path.dirname(stamp))
-
- return stamp
-
-def make_stamp(task, d, file_name = None):
- """
- Creates/updates a stamp for a given task
- (d can be a data dict or dataCache)
- """
- stamp = stamp_internal(task, d, file_name)
- # Remove the file and recreate to force timestamp
- # change on broken NFS filesystems
- if stamp:
- bb.utils.remove(stamp)
- f = open(stamp, "w")
- f.close()
-
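make_stamp() deletes and recreates the stamp rather than touching it in place; a self-contained sketch of the same idiom (touch_stamp is an illustrative name):

import os

def touch_stamp(path):
    """Recreate 'path' empty so it always carries a fresh mtime."""
    if os.path.exists(path):
        os.remove(path)      # drop the old inode; utime alone can misbehave on NFS
    open(path, "w").close()  # a new empty file guarantees a new timestamp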
-def del_stamp(task, d, file_name = None):
- """
- Removes a stamp for a given task
- (d can be a data dict or dataCache)
- """
- stamp = stamp_internal(task, d, file_name)
- bb.utils.remove(stamp)
-
-def stampfile(taskname, d, file_name = None):
- """
- Return the stamp for a given task
- (d can be a data dict or dataCache)
- """
- return stamp_internal(taskname, d, file_name)
-
-def add_tasks(tasklist, d):
- task_deps = data.getVar('_task_deps', d)
- if not task_deps:
- task_deps = {}
- if not 'tasks' in task_deps:
- task_deps['tasks'] = []
- if not 'parents' in task_deps:
- task_deps['parents'] = {}
-
- for task in tasklist:
- task = data.expand(task, d)
- data.setVarFlag(task, 'task', 1, d)
-
- if not task in task_deps['tasks']:
- task_deps['tasks'].append(task)
-
- flags = data.getVarFlags(task, d)
- def getTask(name):
- if not name in task_deps:
- task_deps[name] = {}
- if name in flags:
- deptask = data.expand(flags[name], d)
- task_deps[name][task] = deptask
- getTask('depends')
- getTask('deptask')
- getTask('rdeptask')
- getTask('recrdeptask')
- getTask('nostamp')
- getTask('fakeroot')
- getTask('noexec')
- task_deps['parents'][task] = []
- for dep in flags['deps']:
- dep = data.expand(dep, d)
- task_deps['parents'][task].append(dep)
-
-    # don't assume the datastore kept a reference; store the updated dict back
- data.setVar('_task_deps', task_deps, d)
-
-def remove_task(task, kill, d):
-    """Remove a BB 'task'.
-
- If kill is 1, also remove tasks that depend on this task."""
-
- data.delVarFlag(task, 'task', d)
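Before the cache code below, a compact sketch of the dup2 save/redirect/restore sequence that _exec_task() above uses to capture console output in the task logfile. This reduction only redirects stdout; the real code also handles stdin and stderr, and redirected() is an illustrative name.

import os
import sys

def redirected(logpath, action):
    """Run action() with stdout redirected to logpath, then restore it."""
    saved = os.dup(sys.stdout.fileno())      # keep the original fd alive
    log = open(logpath, 'w')
    try:
        os.dup2(log.fileno(), sys.stdout.fileno())
        action()
    finally:
        sys.stdout.flush()
        os.dup2(saved, sys.stdout.fileno())  # put the original fd back
        os.close(saved)
        log.close()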
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
deleted file mode 100644
index 7ea04ac1a5..0000000000
--- a/bitbake/lib/bb/cache.py
+++ /dev/null
@@ -1,632 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake 'Cache' implementation
-#
-# Caching of bitbake variables before task execution
-
-# Copyright (C) 2006 Richard Purdie
-
-# but small sections based on code from bin/bitbake:
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-import os
-import logging
-from collections import defaultdict, namedtuple
-import bb.data
-import bb.utils
-
-logger = logging.getLogger("BitBake.Cache")
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
-__cache_version__ = "136"
-
-recipe_fields = (
- 'pn',
- 'pv',
- 'pr',
- 'pe',
- 'defaultpref',
- 'depends',
- 'provides',
- 'task_deps',
- 'stamp',
- 'stamp_extrainfo',
- 'broken',
- 'not_world',
- 'skipped',
- 'timestamp',
- 'packages',
- 'packages_dynamic',
- 'rdepends',
- 'rdepends_pkg',
- 'rprovides',
- 'rprovides_pkg',
- 'rrecommends',
- 'rrecommends_pkg',
- 'nocache',
- 'variants',
- 'file_depends',
- 'tasks',
- 'basetaskhashes',
- 'hashfilename',
-)
-
-
-class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
- __slots__ = ()
-
- @classmethod
- def listvar(cls, var, metadata):
- return cls.getvar(var, metadata).split()
-
- @classmethod
- def intvar(cls, var, metadata):
- return int(cls.getvar(var, metadata) or 0)
-
- @classmethod
- def depvar(cls, var, metadata):
- return bb.utils.explode_deps(cls.getvar(var, metadata))
-
- @classmethod
- def pkgvar(cls, var, packages, metadata):
- return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
- for pkg in packages)
-
- @classmethod
- def taskvar(cls, var, tasks, metadata):
- return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
- for task in tasks)
-
- @classmethod
- def flaglist(cls, flag, varlist, metadata):
- return dict((var, metadata.getVarFlag(var, flag, True))
- for var in varlist)
-
- @classmethod
- def getvar(cls, var, metadata):
- return metadata.getVar(var, True) or ''
-
- @classmethod
- def make_optional(cls, default=None, **kwargs):
- """Construct the namedtuple from the specified keyword arguments,
- with every value considered optional, using the default value if
- it was not specified."""
- for field in cls._fields:
- kwargs[field] = kwargs.get(field, default)
- return cls(**kwargs)
-
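make_optional() above fills every unspecified namedtuple field with a default; the same pattern on a tiny standalone type (Point is purely illustrative):

from collections import namedtuple

class Point(namedtuple('Point', ('x', 'y', 'z'))):
    __slots__ = ()

    @classmethod
    def make_optional(cls, default=None, **kwargs):
        for field in cls._fields:
            kwargs.setdefault(field, default)  # same effect as kwargs.get(field, default)
        return cls(**kwargs)

assert Point.make_optional(x=1) == Point(1, None, None)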
- @classmethod
- def from_metadata(cls, filename, metadata):
- if cls.getvar('__SKIPPED', metadata):
- return cls.make_optional(skipped=True)
-
- tasks = metadata.getVar('__BBTASKS', False)
-
- pn = cls.getvar('PN', metadata)
- packages = cls.listvar('PACKAGES', metadata)
- if not pn in packages:
- packages.append(pn)
-
- return RecipeInfo(
- tasks = tasks,
- basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata),
- hashfilename = cls.getvar('BB_HASHFILENAME', metadata),
-
- file_depends = metadata.getVar('__depends', False),
- task_deps = metadata.getVar('_task_deps', False) or
- {'tasks': [], 'parents': {}},
- variants = cls.listvar('__VARIANTS', metadata) + [''],
-
- skipped = False,
- timestamp = bb.parse.cached_mtime(filename),
- packages = cls.listvar('PACKAGES', metadata),
- pn = pn,
- pe = cls.getvar('PE', metadata),
- pv = cls.getvar('PV', metadata),
- pr = cls.getvar('PR', metadata),
- nocache = cls.getvar('__BB_DONT_CACHE', metadata),
- defaultpref = cls.intvar('DEFAULT_PREFERENCE', metadata),
- broken = cls.getvar('BROKEN', metadata),
- not_world = cls.getvar('EXCLUDE_FROM_WORLD', metadata),
- stamp = cls.getvar('STAMP', metadata),
- stamp_extrainfo = cls.flaglist('stamp-extra-info', tasks, metadata),
- packages_dynamic = cls.listvar('PACKAGES_DYNAMIC', metadata),
- depends = cls.depvar('DEPENDS', metadata),
- provides = cls.depvar('PROVIDES', metadata),
- rdepends = cls.depvar('RDEPENDS', metadata),
- rprovides = cls.depvar('RPROVIDES', metadata),
- rrecommends = cls.depvar('RRECOMMENDS', metadata),
- rprovides_pkg = cls.pkgvar('RPROVIDES', packages, metadata),
- rdepends_pkg = cls.pkgvar('RDEPENDS', packages, metadata),
- rrecommends_pkg = cls.pkgvar('RRECOMMENDS', packages, metadata),
- )
-
-
-class Cache(object):
- """
- BitBake Cache implementation
- """
-
- def __init__(self, data):
- self.cachedir = bb.data.getVar("CACHE", data, True)
- self.clean = set()
- self.checked = set()
- self.depends_cache = {}
- self.data = None
- self.data_fn = None
- self.cacheclean = True
-
- if self.cachedir in [None, '']:
- self.has_cache = False
- logger.info("Not using a cache. "
- "Set CACHE = <directory> to enable.")
- return
-
- self.has_cache = True
- self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
-
- logger.debug(1, "Using cache in '%s'", self.cachedir)
- bb.utils.mkdirhier(self.cachedir)
-
- # If any of configuration.data's dependencies are newer than the
- # cache there isn't even any point in loading it...
- newest_mtime = 0
- deps = bb.data.getVar("__base_depends", data)
-
- old_mtimes = [old_mtime for _, old_mtime in deps]
- old_mtimes.append(newest_mtime)
- newest_mtime = max(old_mtimes)
-
- if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
- self.load_cachefile()
- elif os.path.isfile(self.cachefile):
- logger.info("Out of date cache found, rebuilding...")
-
- def load_cachefile(self):
- with open(self.cachefile, "rb") as cachefile:
- pickled = pickle.Unpickler(cachefile)
- try:
- cache_ver = pickled.load()
- bitbake_ver = pickled.load()
- except Exception:
- logger.info('Invalid cache, rebuilding...')
- return
-
- if cache_ver != __cache_version__:
- logger.info('Cache version mismatch, rebuilding...')
- return
- elif bitbake_ver != bb.__version__:
- logger.info('Bitbake version mismatch, rebuilding...')
- return
-
- cachesize = os.fstat(cachefile.fileno()).st_size
- bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
-
- previous_percent = 0
- while cachefile:
- try:
- key = pickled.load()
- value = pickled.load()
- except Exception:
- break
-
- self.depends_cache[key] = value
-
-                # only fire progress events when the integer percentage changes
- current_progress = cachefile.tell()
- current_percent = 100 * current_progress / cachesize
- if current_percent > previous_percent:
- previous_percent = current_percent
- bb.event.fire(bb.event.CacheLoadProgress(current_progress),
- self.data)
-
- bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
- len(self.depends_cache)),
- self.data)
-
- @staticmethod
- def virtualfn2realfn(virtualfn):
- """
- Convert a virtual file name to a real one + the associated subclass keyword
- """
-
- fn = virtualfn
- cls = ""
- if virtualfn.startswith('virtual:'):
- cls = virtualfn.split(':', 2)[1]
- fn = virtualfn.replace('virtual:' + cls + ':', '')
- return (fn, cls)
-
- @staticmethod
- def realfn2virtual(realfn, cls):
- """
- Convert a real filename + the associated subclass keyword to a virtual filename
- """
- if cls == "":
- return realfn
- return "virtual:" + cls + ":" + realfn
-
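The two helpers above define the 'virtual:<class>:<filename>' naming convention. A standalone round-trip of the intended mapping; split_virtual re-implements the split for illustration and may differ from the replace()-based code above on pathological inputs:

def split_virtual(virtualfn):
    if virtualfn.startswith('virtual:'):
        _, cls, fn = virtualfn.split(':', 2)
        return fn, cls
    return virtualfn, ""

assert split_virtual('virtual:native:/recipes/foo.bb') == ('/recipes/foo.bb', 'native')
assert split_virtual('/recipes/foo.bb') == ('/recipes/foo.bb', '')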
- @classmethod
- def loadDataFull(cls, virtualfn, appends, cfgData):
- """
- Return a complete set of data for fn.
- To do this, we need to parse the file.
- """
-
- (fn, virtual) = cls.virtualfn2realfn(virtualfn)
-
- logger.debug(1, "Parsing %s (full)", fn)
-
- bb_data = cls.load_bbfile(fn, appends, cfgData)
- return bb_data[virtual]
-
- @classmethod
- def parse(cls, filename, appends, configdata):
- """Parse the specified filename, returning the recipe information"""
- infos = []
- datastores = cls.load_bbfile(filename, appends, configdata)
- depends = set()
- for variant, data in sorted(datastores.iteritems(),
- key=lambda i: i[0],
- reverse=True):
- virtualfn = cls.realfn2virtual(filename, variant)
- depends |= (data.getVar("__depends", False) or set())
- if depends and not variant:
- data.setVar("__depends", depends)
- info = RecipeInfo.from_metadata(filename, data)
- infos.append((virtualfn, info))
- return infos
-
- def load(self, filename, appends, configdata):
- """Obtain the recipe information for the specified filename,
- using cached values if available, otherwise parsing.
-
- Note that if it does parse to obtain the info, it will not
- automatically add the information to the cache or to your
- CacheData. Use the add or add_info method to do so after
- running this, or use loadData instead."""
- cached = self.cacheValid(filename)
- if cached:
- infos = []
- info = self.depends_cache[filename]
- for variant in info.variants:
- virtualfn = self.realfn2virtual(filename, variant)
- infos.append((virtualfn, self.depends_cache[virtualfn]))
- else:
- logger.debug(1, "Parsing %s", filename)
-            infos = self.parse(filename, appends, configdata)
-
- return cached, infos
-
- def loadData(self, fn, appends, cfgData, cacheData):
- """Load the recipe info for the specified filename,
- parsing and adding to the cache if necessary, and adding
- the recipe information to the supplied CacheData instance."""
- skipped, virtuals = 0, 0
-
- cached, infos = self.load(fn, appends, cfgData)
- for virtualfn, info in infos:
- if info.skipped:
- logger.debug(1, "Skipping %s", virtualfn)
- skipped += 1
- else:
- self.add_info(virtualfn, info, cacheData, not cached)
- virtuals += 1
-
- return cached, skipped, virtuals
-
- def cacheValid(self, fn):
- """
- Is the cache valid for fn?
- Fast version, no timestamps checked.
- """
- if fn not in self.checked:
- self.cacheValidUpdate(fn)
-
- # Is cache enabled?
- if not self.has_cache:
- return False
- if fn in self.clean:
- return True
- return False
-
- def cacheValidUpdate(self, fn):
- """
- Is the cache valid for fn?
- Make thorough (slower) checks including timestamps.
- """
- # Is cache enabled?
- if not self.has_cache:
- return False
-
- self.checked.add(fn)
-
- # File isn't in depends_cache
- if not fn in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", fn)
- return False
-
- mtime = bb.parse.cached_mtime_noerror(fn)
-
- # Check file still exists
- if mtime == 0:
- logger.debug(2, "Cache: %s no longer exists", fn)
- self.remove(fn)
- return False
-
- info = self.depends_cache[fn]
- # Check the file's timestamp
- if mtime != info.timestamp:
- logger.debug(2, "Cache: %s changed", fn)
- self.remove(fn)
- return False
-
- # Check dependencies are still valid
- depends = info.file_depends
- if depends:
- for f, old_mtime in depends:
- fmtime = bb.parse.cached_mtime_noerror(f)
- # Check if file still exists
- if old_mtime != 0 and fmtime == 0:
- logger.debug(2, "Cache: %s's dependency %s was removed",
- fn, f)
- self.remove(fn)
- return False
-
- if (fmtime != old_mtime):
- logger.debug(2, "Cache: %s's dependency %s changed",
- fn, f)
- self.remove(fn)
- return False
-
- invalid = False
- for cls in info.variants:
- virtualfn = self.realfn2virtual(fn, cls)
- self.clean.add(virtualfn)
- if virtualfn not in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", virtualfn)
- invalid = True
-
- # If any one of the variants is not present, mark as invalid for all
- if invalid:
- for cls in info.variants:
- virtualfn = self.realfn2virtual(fn, cls)
- if virtualfn in self.clean:
- logger.debug(2, "Cache: Removing %s from cache", virtualfn)
- self.clean.remove(virtualfn)
- if fn in self.clean:
- logger.debug(2, "Cache: Marking %s as not clean", fn)
- self.clean.remove(fn)
- return False
-
- self.clean.add(fn)
- return True
-
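Stripped of the variant bookkeeping, cacheValidUpdate() applies one rule: an entry is stale when the recipe or any recorded dependency no longer matches the mtime captured at parse time. A hedged standalone reduction of that rule (is_stale is an illustrative name):

import os

def is_stale(path, cached_mtime, deps):
    """deps: iterable of (filename, mtime) pairs recorded at parse time."""
    def mtime(p):
        try:
            return os.stat(p).st_mtime
        except OSError:
            return 0                  # a missing file counts as changed
    if mtime(path) != cached_mtime:
        return True
    return any(mtime(f) != old for f, old in deps)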
- def remove(self, fn):
- """
- Remove a fn from the cache
- Called from the parser in error cases
- """
- if fn in self.depends_cache:
- logger.debug(1, "Removing %s from cache", fn)
- del self.depends_cache[fn]
- if fn in self.clean:
- logger.debug(1, "Marking %s as unclean", fn)
- self.clean.remove(fn)
-
- def sync(self):
- """
- Save the cache
- Called from the parser when complete (or exiting)
- """
-
- if not self.has_cache:
- return
-
- if self.cacheclean:
- logger.debug(2, "Cache is clean, not saving.")
- return
-
- with open(self.cachefile, "wb") as cachefile:
- pickler = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
- pickler.dump(__cache_version__)
- pickler.dump(bb.__version__)
- for key, value in self.depends_cache.iteritems():
- pickler.dump(key)
- pickler.dump(value)
-
- del self.depends_cache
-
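sync() writes, and load_cachefile() reads back, a version-stamped pickle stream. A self-contained sketch of that on-disk format; the path and payload here are invented for the example:

import os
import pickle
import tempfile

VERSION = "136"
payload = {"foo.bb": {"pn": "foo"}}

path = os.path.join(tempfile.mkdtemp(), "bb_cache.dat")
with open(path, "wb") as f:
    pickler = pickle.Pickler(f, pickle.HIGHEST_PROTOCOL)
    pickler.dump(VERSION)               # readers check the version first...
    for key, value in payload.items():
        pickler.dump(key)               # ...then stream key/value pairs
        pickler.dump(value)

with open(path, "rb") as f:
    unpickler = pickle.Unpickler(f)
    assert unpickler.load() == VERSION  # a mismatch means: rebuild the cache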
- @staticmethod
- def mtime(cachefile):
- return bb.parse.cached_mtime_noerror(cachefile)
-
- def add_info(self, filename, info, cacheData, parsed=None):
- cacheData.add_from_recipeinfo(filename, info)
- if not self.has_cache:
- return
-
- if 'SRCREVINACTION' not in info.pv and not info.nocache:
- if parsed:
- self.cacheclean = False
- self.depends_cache[filename] = info
-
- def add(self, file_name, data, cacheData, parsed=None):
- """
- Save data we need into the cache
- """
-
- realfn = self.virtualfn2realfn(file_name)[0]
- info = RecipeInfo.from_metadata(realfn, data)
- self.add_info(file_name, info, cacheData, parsed)
-
- @staticmethod
- def load_bbfile(bbfile, appends, config):
- """
- Load and parse one .bb build file
-        Return the parsed datastore(s), keyed by variant
- """
- chdir_back = False
-
- from bb import data, parse
-
- # expand tmpdir to include this topdir
- data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- oldpath = os.path.abspath(os.getcwd())
- parse.cached_mtime_noerror(bbfile_loc)
- bb_data = data.init_db(config)
- # The ConfHandler first looks if there is a TOPDIR and if not
- # then it would call getcwd().
- # Previously, we chdir()ed to bbfile_loc, called the handler
- # and finally chdir()ed back, a couple of thousand times. We now
- # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
- if not data.getVar('TOPDIR', bb_data):
- chdir_back = True
- data.setVar('TOPDIR', bbfile_loc, bb_data)
- try:
- if appends:
- data.setVar('__BBAPPEND', " ".join(appends), bb_data)
- bb_data = parse.handle(bbfile, bb_data)
- if chdir_back:
- os.chdir(oldpath)
- return bb_data
- except:
- if chdir_back:
- os.chdir(oldpath)
- raise
-
-
-def init(cooker):
- """
- The Objective: Cache the minimum amount of data possible yet get to the
- stage of building packages (i.e. tryBuild) without reparsing any .bb files.
-
- To do this, we intercept getVar calls and only cache the variables we see
- being accessed. We rely on the cache getVar calls being made for all
- variables bitbake might need to use to reach this stage. For each cached
- file we need to track:
-
- * Its mtime
- * The mtimes of all its dependencies
- * Whether it caused a parse.SkipPackage exception
-
- Files causing parsing errors are evicted from the cache.
-
- """
- return Cache(cooker.configuration.data)
-
-
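The interception idea in init()'s docstring reduces to a wrapper that records every getVar() call; a toy standalone version (TrackingData is illustrative, not bb.data):

class TrackingData(object):
    def __init__(self, values):
        self._values = values
        self.accessed = set()          # every variable name ever read

    def getVar(self, name, expand=False):
        self.accessed.add(name)
        return self._values.get(name)

d = TrackingData({'PN': 'foo', 'PV': '1.0'})
d.getVar('PN')
assert d.accessed == set(['PN'])       # only PN would need caching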
-class CacheData(object):
- """
- The data structures we compile from the cached data
- """
-
- def __init__(self):
- # Direct cache variables
- self.providers = defaultdict(list)
- self.rproviders = defaultdict(list)
- self.packages = defaultdict(list)
- self.packages_dynamic = defaultdict(list)
- self.possible_world = []
- self.pkg_pn = defaultdict(list)
- self.pkg_fn = {}
- self.pkg_pepvpr = {}
- self.pkg_dp = {}
- self.pn_provides = defaultdict(list)
- self.fn_provides = {}
- self.all_depends = []
- self.deps = defaultdict(list)
- self.rundeps = defaultdict(lambda: defaultdict(list))
- self.runrecs = defaultdict(lambda: defaultdict(list))
- self.task_queues = {}
- self.task_deps = {}
- self.stamp = {}
- self.stamp_extrainfo = {}
- self.preferred = {}
- self.tasks = {}
- self.basetaskhash = {}
- self.hashfn = {}
-
- # Indirect Cache variables (set elsewhere)
- self.ignored_dependencies = []
- self.world_target = set()
- self.bbfile_priority = {}
- self.bbfile_config_priorities = []
-
- def add_from_recipeinfo(self, fn, info):
- self.task_deps[fn] = info.task_deps
- self.pkg_fn[fn] = info.pn
- self.pkg_pn[info.pn].append(fn)
- self.pkg_pepvpr[fn] = (info.pe, info.pv, info.pr)
- self.pkg_dp[fn] = info.defaultpref
- self.stamp[fn] = info.stamp
- self.stamp_extrainfo[fn] = info.stamp_extrainfo
-
- provides = [info.pn]
- for provide in info.provides:
- if provide not in provides:
- provides.append(provide)
- self.fn_provides[fn] = provides
-
- for provide in provides:
- self.providers[provide].append(fn)
- if provide not in self.pn_provides[info.pn]:
- self.pn_provides[info.pn].append(provide)
-
- for dep in info.depends:
- if dep not in self.deps[fn]:
- self.deps[fn].append(dep)
- if dep not in self.all_depends:
- self.all_depends.append(dep)
-
- rprovides = info.rprovides
- for package in info.packages:
- self.packages[package].append(fn)
- rprovides += info.rprovides_pkg[package]
-
- for rprovide in rprovides:
- self.rproviders[rprovide].append(fn)
-
- for package in info.packages_dynamic:
- self.packages_dynamic[package].append(fn)
-
-        # Build hash of runtime depends and recommends
- for package in info.packages + [info.pn]:
- self.rundeps[fn][package] = list(info.rdepends) + info.rdepends_pkg[package]
- self.runrecs[fn][package] = list(info.rrecommends) + info.rrecommends_pkg[package]
-
- # Collect files we may need for possible world-dep
- # calculations
- if not info.broken and not info.not_world:
- self.possible_world.append(fn)
-
- self.hashfn[fn] = info.hashfilename
- for task, taskhash in info.basetaskhashes.iteritems():
- identifier = '%s.%s' % (fn, task)
- self.basetaskhash[identifier] = taskhash
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
deleted file mode 100644
index bfffcacc33..0000000000
--- a/bitbake/lib/bb/codeparser.py
+++ /dev/null
@@ -1,336 +0,0 @@
-import ast
-import codegen
-import logging
-import os.path
-import bb.utils, bb.data
-from itertools import chain
-from pysh import pyshyacc, pyshlex
-from pysh.sherrors import ShellSyntaxError
-
-
-logger = logging.getLogger('BitBake.CodeParser')
-PARSERCACHE_VERSION = 2
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
-
-def check_indent(codestr):
- """If the code is indented, add a top level piece of code to 'remove' the indentation"""
-
- i = 0
-    while codestr[i] in ["\n", "\t", " "]:
- i = i + 1
-
- if i == 0:
- return codestr
-
-    if codestr[i-1] == "\t" or codestr[i-1] == " ":
- return "if 1:\n" + codestr
-
- return codestr
-
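One concrete case of what check_indent() achieves: wrapping a uniformly indented snippet in a dummy 'if 1:' block keeps compile() happy:

snippet = "    x = 1\n    y = 2\n"      # indented input, as pasted into metadata
wrapped = "if 1:\n" + snippet           # what check_indent() produces here
compile(wrapped, "<string>", "exec")    # compiles; the bare snippet would not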
-pythonparsecache = {}
-shellparsecache = {}
-
-def parser_cachefile(d):
- cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
- bb.data.getVar("CACHE", d, True))
- if cachedir in [None, '']:
- return None
- bb.utils.mkdirhier(cachedir)
- cachefile = os.path.join(cachedir, "bb_codeparser.dat")
- logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
- return cachefile
-
-def parser_cache_init(d):
- global pythonparsecache
- global shellparsecache
-
- cachefile = parser_cachefile(d)
- if not cachefile:
- return
-
- try:
- p = pickle.Unpickler(file(cachefile, "rb"))
- data, version = p.load()
- except:
- return
-
- if version != PARSERCACHE_VERSION:
- return
-
- pythonparsecache = data[0]
- shellparsecache = data[1]
-
-def parser_cache_save(d):
- cachefile = parser_cachefile(d)
- if not cachefile:
- return
-
- p = pickle.Pickler(file(cachefile, "wb"), -1)
- p.dump([[pythonparsecache, shellparsecache], PARSERCACHE_VERSION])
-
-class PythonParser():
- class ValueVisitor():
- """Visitor to traverse a python abstract syntax tree and obtain
- the variables referenced via bitbake metadata APIs, and the external
- functions called.
- """
-
- getvars = ("d.getVar", "bb.data.getVar", "data.getVar")
- expands = ("d.expand", "bb.data.expand", "data.expand")
- execs = ("bb.build.exec_func", "bb.build.exec_task")
-
- @classmethod
- def _compare_name(cls, strparts, node):
- """Given a sequence of strings representing a python name,
- where the last component is the actual Name and the prior
- elements are Attribute nodes, determine if the supplied node
- matches.
- """
-
- if not strparts:
- return True
-
- current, rest = strparts[0], strparts[1:]
- if isinstance(node, ast.Attribute):
- if current == node.attr:
- return cls._compare_name(rest, node.value)
- elif isinstance(node, ast.Name):
- if current == node.id:
- return True
- return False
-
- @classmethod
- def compare_name(cls, value, node):
-        """Convenience wrapper for the _compare_name method, which
- can accept a string (which is split by '.' for you), or an
- iterable of strings, in which case it checks to see if any of
- them match, similar to isinstance.
- """
-
- if isinstance(value, basestring):
- return cls._compare_name(tuple(reversed(value.split("."))),
- node)
- else:
- return any(cls.compare_name(item, node) for item in value)
-
- def __init__(self, value):
- self.var_references = set()
- self.var_execs = set()
- self.direct_func_calls = set()
- self.var_expands = set()
- self.value = value
-
- @classmethod
- def warn(cls, func, arg):
- """Warn about calls of bitbake APIs which pass a non-literal
- argument for the variable name, as we're not able to track such
- a reference.
- """
-
- try:
- funcstr = codegen.to_source(func)
- argstr = codegen.to_source(arg)
- except TypeError:
- logger.debug(2, 'Failed to convert function and argument to source form')
- else:
- logger.debug(1, "Warning: in call to '%s', argument '%s' is "
- "not a literal", funcstr, argstr)
-
- def visit_Call(self, node):
- if self.compare_name(self.getvars, node.func):
- if isinstance(node.args[0], ast.Str):
- self.var_references.add(node.args[0].s)
- else:
- self.warn(node.func, node.args[0])
- elif self.compare_name(self.expands, node.func):
-                if isinstance(node.args[0], ast.Str):
-                    self.var_expands.add(node.args[0].s)
- elif isinstance(node.args[0], ast.Call) and \
- self.compare_name(self.getvars, node.args[0].func):
- pass
- else:
- self.warn(node.func, node.args[0])
- elif self.compare_name(self.execs, node.func):
- if isinstance(node.args[0], ast.Str):
- self.var_execs.add(node.args[0].s)
- else:
- self.warn(node.func, node.args[0])
- elif isinstance(node.func, ast.Name):
- self.direct_func_calls.add(node.func.id)
- elif isinstance(node.func, ast.Attribute):
- # We must have a qualified name. Therefore we need
- # to walk the chain of 'Attribute' nodes to determine
- # the qualification.
- attr_node = node.func.value
- identifier = node.func.attr
- while isinstance(attr_node, ast.Attribute):
- identifier = attr_node.attr + "." + identifier
- attr_node = attr_node.value
- if isinstance(attr_node, ast.Name):
- identifier = attr_node.id + "." + identifier
- self.direct_func_calls.add(identifier)
-
- def __init__(self):
- #self.funcdefs = set()
- self.execs = set()
- #self.external_cmds = set()
- self.references = set()
-
- def parse_python(self, node):
-
- h = hash(str(node))
-
- if h in pythonparsecache:
- self.references = pythonparsecache[h]["refs"]
- self.execs = pythonparsecache[h]["execs"]
- return
-
- code = compile(check_indent(str(node)), "<string>", "exec",
- ast.PyCF_ONLY_AST)
-
- visitor = self.ValueVisitor(code)
- for n in ast.walk(code):
- if n.__class__.__name__ == "Call":
- visitor.visit_Call(n)
-
- self.references.update(visitor.var_references)
- self.references.update(visitor.var_execs)
- self.execs = visitor.direct_func_calls
-
- pythonparsecache[h] = {}
- pythonparsecache[h]["refs"] = self.references
- pythonparsecache[h]["execs"] = self.execs
-
-class ShellParser():
- def __init__(self):
- self.funcdefs = set()
- self.allexecs = set()
- self.execs = set()
-
- def parse_shell(self, value):
- """Parse the supplied shell code in a string, returning the external
- commands it executes.
- """
-
- h = hash(str(value))
-
- if h in shellparsecache:
- self.execs = shellparsecache[h]["execs"]
- return self.execs
-
- try:
- tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
- except pyshlex.NeedMore:
- raise ShellSyntaxError("Unexpected EOF")
-
- for token in tokens:
- self.process_tokens(token)
- self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
-
- shellparsecache[h] = {}
- shellparsecache[h]["execs"] = self.execs
-
- return self.execs
-
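Both parsers memoise their results keyed on a hash of the input text. The same pattern in isolation; hashlib is used here for a stable key, whereas the code above relies on the builtin hash():

import hashlib

_cache = {}

def cached(value, parse):
    key = hashlib.md5(value.encode('utf-8')).hexdigest()
    if key not in _cache:
        _cache[key] = parse(value)     # parse once...
    return _cache[key]                 # ...serve later calls from the cache

assert cached("echo hi", str.split) == ["echo", "hi"]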
- def process_tokens(self, tokens):
- """Process a supplied portion of the syntax tree as returned by
- pyshyacc.parse.
- """
-
- def function_definition(value):
- self.funcdefs.add(value.name)
- return [value.body], None
-
- def case_clause(value):
- # Element 0 of each item in the case is the list of patterns, and
- # Element 1 of each item in the case is the list of commands to be
- # executed when that pattern matches.
- words = chain(*[item[0] for item in value.items])
- cmds = chain(*[item[1] for item in value.items])
- return cmds, words
-
- def if_clause(value):
- main = chain(value.cond, value.if_cmds)
- rest = value.else_cmds
- if isinstance(rest, tuple) and rest[0] == "elif":
- return chain(main, if_clause(rest[1]))
- else:
- return chain(main, rest)
-
- def simple_command(value):
- return None, chain(value.words, (assign[1] for assign in value.assigns))
-
- token_handlers = {
- "and_or": lambda x: ((x.left, x.right), None),
- "async": lambda x: ([x], None),
- "brace_group": lambda x: (x.cmds, None),
- "for_clause": lambda x: (x.cmds, x.items),
- "function_definition": function_definition,
- "if_clause": lambda x: (if_clause(x), None),
- "pipeline": lambda x: (x.commands, None),
- "redirect_list": lambda x: ([x.cmd], None),
- "subshell": lambda x: (x.cmds, None),
- "while_clause": lambda x: (chain(x.condition, x.cmds), None),
- "until_clause": lambda x: (chain(x.condition, x.cmds), None),
- "simple_command": simple_command,
- "case_clause": case_clause,
- }
-
- for token in tokens:
- name, value = token
- try:
- more_tokens, words = token_handlers[name](value)
- except KeyError:
- raise NotImplementedError("Unsupported token type " + name)
-
- if more_tokens:
- self.process_tokens(more_tokens)
-
- if words:
- self.process_words(words)
-
- def process_words(self, words):
- """Process a set of 'words' in pyshyacc parlance, which includes
- extraction of executed commands from $() blocks, as well as grabbing
- the command name argument.
- """
-
- words = list(words)
- for word in list(words):
- wtree = pyshlex.make_wordtree(word[1])
- for part in wtree:
- if not isinstance(part, list):
- continue
-
- if part[0] in ('`', '$('):
- command = pyshlex.wordtree_as_string(part[1:-1])
- self.parse_shell(command)
-
- if word[0] in ("cmd_name", "cmd_word"):
- if word in words:
- words.remove(word)
-
- usetoken = False
- for word in words:
- if word[0] in ("cmd_name", "cmd_word") or \
- (usetoken and word[0] == "TOKEN"):
- if "=" in word[1]:
- usetoken = True
- continue
-
- cmd = word[1]
- if cmd.startswith("$"):
- logger.debug(1, "Warning: execution of non-literal "
- "command '%s'", cmd)
- elif cmd == "eval":
- command = " ".join(word for _, word in words[1:])
- self.parse_shell(command)
- else:
- self.allexecs.add(cmd)
- break
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
deleted file mode 100644
index b88089298c..0000000000
--- a/bitbake/lib/bb/command.py
+++ /dev/null
@@ -1,271 +0,0 @@
-"""
-BitBake 'Command' module
-
-Provide an interface to interact with the bitbake server through 'commands'
-"""
-
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
-The bitbake server takes 'commands' from its UI/commandline.
-Commands are either synchronous or asynchronous.
-Async commands return data to the client in the form of events.
-Sync commands must only return data through the function return value
-and must not trigger events, directly or indirectly.
-Commands are queued in a CommandQueue
-"""
-
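A toy dispatcher making the sync/async split above concrete: synchronous handlers answer immediately, while at most one asynchronous command is queued for an idle handler to run later. MiniCommand and its handlers are illustrative only.

class MiniCommand(object):
    def __init__(self, sync_cmds, async_cmds):
        self.sync_cmds = sync_cmds
        self.async_cmds = async_cmds
        self.pending = None                     # at most one async command

    def run(self, name, *args):
        if name in self.sync_cmds:
            return self.sync_cmds[name](*args)  # sync: direct return value
        if name not in self.async_cmds:
            return "No such command"
        if self.pending is not None:
            return "Busy (%s in progress)" % self.pending[0]
        self.pending = (name, args)             # async: run from the idle loop
        return True

cmd = MiniCommand({'getVariable': lambda n: n.upper()}, {'buildTargets': None})
assert cmd.run('getVariable', 'pn') == 'PN'
assert cmd.run('buildTargets', ['world']) is True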
-import bb.event
-import bb.cooker
-import bb.data
-
-async_cmds = {}
-sync_cmds = {}
-
-
-class CommandCompleted(bb.event.Event):
- pass
-
-class CommandExit(bb.event.Event):
- def __init__(self, exitcode):
- bb.event.Event.__init__(self)
- self.exitcode = int(exitcode)
-
-class CommandFailed(CommandExit):
- def __init__(self, message):
- self.error = message
- CommandExit.__init__(self, 1)
-
-class Command:
- """
- A queue of asynchronous commands for bitbake
- """
- def __init__(self, cooker):
- self.cooker = cooker
- self.cmds_sync = CommandsSync()
- self.cmds_async = CommandsAsync()
-
- # FIXME Add lock for this
- self.currentAsyncCommand = None
-
- for attr in CommandsSync.__dict__:
-            command = attr.lower()
- method = getattr(CommandsSync, attr)
- sync_cmds[command] = (method)
-
- for attr in CommandsAsync.__dict__:
-            command = attr.lower()
- method = getattr(CommandsAsync, attr)
- async_cmds[command] = (method)
-
- def runCommand(self, commandline):
- try:
- command = commandline.pop(0)
- if command in CommandsSync.__dict__:
- # Can run synchronous commands straight away
- return getattr(CommandsSync, command)(self.cmds_sync, self, commandline)
- if self.currentAsyncCommand is not None:
- return "Busy (%s in progress)" % self.currentAsyncCommand[0]
- if command not in CommandsAsync.__dict__:
- return "No such command"
- self.currentAsyncCommand = (command, commandline)
- self.cooker.server.register_idle_function(self.cooker.runCommands, self.cooker)
- return True
- except:
- import traceback
- return traceback.format_exc()
-
- def runAsyncCommand(self):
- try:
- if self.currentAsyncCommand is not None:
- (command, options) = self.currentAsyncCommand
- commandmethod = getattr(CommandsAsync, command)
- needcache = getattr( commandmethod, "needcache" )
- if (needcache and self.cooker.state in
- (bb.cooker.state.initial, bb.cooker.state.parsing)):
- self.cooker.updateCache()
- return True
- else:
- commandmethod(self.cmds_async, self, options)
- return False
- else:
- return False
- except KeyboardInterrupt as exc:
- self.finishAsyncCommand("Interrupted")
- return False
- except SystemExit as exc:
- arg = exc.args[0]
- if isinstance(arg, basestring):
- self.finishAsyncCommand(arg)
- else:
- self.finishAsyncCommand("Exited with %s" % arg)
- return False
- except Exception:
- import traceback
- self.finishAsyncCommand(traceback.format_exc())
- return False
-
- def finishAsyncCommand(self, msg=None, code=None):
- if msg:
- bb.event.fire(CommandFailed(msg), self.cooker.configuration.event_data)
- elif code:
- bb.event.fire(CommandExit(code), self.cooker.configuration.event_data)
- else:
- bb.event.fire(CommandCompleted(), self.cooker.configuration.event_data)
- self.currentAsyncCommand = None
-
-
-class CommandsSync:
- """
- A class of synchronous commands
- These should run quickly so as not to hurt interactive performance.
-    These must not influence any running asynchronous command.
- """
-
- def stateShutdown(self, command, params):
- """
- Trigger cooker 'shutdown' mode
- """
- command.cooker.shutdown()
-
- def stateStop(self, command, params):
- """
- Stop the cooker
- """
- command.cooker.stop()
-
- def getCmdLineAction(self, command, params):
- """
- Get any command parsed from the commandline
- """
- return command.cooker.commandlineAction
-
- def getVariable(self, command, params):
- """
- Read the value of a variable from configuration.data
- """
- varname = params[0]
- expand = True
- if len(params) > 1:
- expand = params[1]
-
- return bb.data.getVar(varname, command.cooker.configuration.data, expand)
-
- def setVariable(self, command, params):
- """
- Set the value of variable in configuration.data
- """
- varname = params[0]
- value = params[1]
- bb.data.setVar(varname, value, command.cooker.configuration.data)
-
-
-class CommandsAsync:
- """
- A class of asynchronous commands
- These functions communicate via generated events.
- Any function that requires metadata parsing should be here.
- """
-
- def buildFile(self, command, params):
- """
- Build a single specified .bb file
- """
- bfile = params[0]
- task = params[1]
-
- command.cooker.buildFile(bfile, task)
- buildFile.needcache = False
-
- def buildTargets(self, command, params):
- """
- Build a set of targets
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.buildTargets(pkgs_to_build, task)
- buildTargets.needcache = True
-
- def generateDepTreeEvent(self, command, params):
- """
- Generate an event containing the dependency information
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.generateDepTreeEvent(pkgs_to_build, task)
- command.finishAsyncCommand()
- generateDepTreeEvent.needcache = True
-
- def generateDotGraph(self, command, params):
- """
- Dump dependency information to disk as .dot files
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.generateDotGraphFiles(pkgs_to_build, task)
- command.finishAsyncCommand()
- generateDotGraph.needcache = True
-
- def showVersions(self, command, params):
- """
- Show the currently selected versions
- """
- command.cooker.showVersions()
- command.finishAsyncCommand()
- showVersions.needcache = True
-
- def showEnvironmentTarget(self, command, params):
- """
- Print the environment of a target recipe
- (needs the cache to work out which recipe to use)
- """
- pkg = params[0]
-
- command.cooker.showEnvironment(None, pkg)
- command.finishAsyncCommand()
- showEnvironmentTarget.needcache = True
-
- def showEnvironment(self, command, params):
- """
- Print the standard environment
- or if specified the environment for a specified recipe
- """
- bfile = params[0]
-
- command.cooker.showEnvironment(bfile)
- command.finishAsyncCommand()
- showEnvironment.needcache = False
-
- def parseFiles(self, command, params):
- """
- Parse the .bb files
- """
- command.cooker.updateCache()
- command.finishAsyncCommand()
- parseFiles.needcache = True
-
- def compareRevisions(self, command, params):
- """
-        Check whether the tracked upstream source revisions have changed
- """
- if bb.fetch.fetcher_compare_revisions(command.cooker.configuration.data):
- command.finishAsyncCommand(code=1)
- else:
- command.finishAsyncCommand()
- compareRevisions.needcache = True
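The needcache markers set on each CommandsAsync method above are plain function attributes consulted at dispatch time; the pattern in miniature, with invented handler names:

def build_targets(targets):
    return "building %r" % (targets,)
build_targets.needcache = True        # cache must be populated first

def show_environment(bbfile):
    return "environment for %s" % bbfile
show_environment.needcache = False    # safe to run before any parsing

def needs_cache(func):
    return getattr(func, "needcache", False)

assert needs_cache(build_targets) and not needs_cache(show_environment)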
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
deleted file mode 100644
index ff16daf83f..0000000000
--- a/bitbake/lib/bb/cooker.py
+++ /dev/null
@@ -1,1078 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import print_function
-import sys, os, glob, os.path, re, time
-import atexit
-import itertools
-import logging
-import multiprocessing
-import signal
-import sre_constants
-import threading
-from cStringIO import StringIO
-from contextlib import closing
-import bb
-from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
-
-logger = logging.getLogger("BitBake")
-collectlog = logging.getLogger("BitBake.Collection")
-buildlog = logging.getLogger("BitBake.Build")
-parselog = logging.getLogger("BitBake.Parsing")
-providerlog = logging.getLogger("BitBake.Provider")
-
-class MultipleMatches(Exception):
- """
- Exception raised when multiple file matches are found
- """
-
-class NothingToBuild(Exception):
- """
- Exception raised when there is nothing to build
- """
-
-class state:
- initial, parsing, running, shutdown, stop = range(5)
-
-#============================================================================#
-# BBCooker
-#============================================================================#
-class BBCooker:
- """
- Manages one bitbake build run
- """
-
- def __init__(self, configuration, server):
- self.status = None
- self.appendlist = {}
-
- if server:
- self.server = server.BitBakeServer(self)
-
- self.configuration = configuration
-
- self.configuration.data = bb.data.init()
-
- if not server:
- bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data)
-
- bb.data.inheritFromOS(self.configuration.data)
-
- self.parseConfigurationFiles(self.configuration.file)
-
- if not self.configuration.cmd:
- self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
-
- bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
- if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
- self.configuration.pkgs_to_build.extend(bbpkgs.split())
-
- #
- # Special updated configuration we use for firing events
- #
- self.configuration.event_data = bb.data.createCopy(self.configuration.data)
- bb.data.update_data(self.configuration.event_data)
-
- # TOSTOP must not be set or our children will hang when they output
- fd = sys.stdout.fileno()
- if os.isatty(fd):
- import termios
- tcattr = termios.tcgetattr(fd)
- if tcattr[3] & termios.TOSTOP:
- buildlog.info("The terminal had the TOSTOP bit set, clearing...")
- tcattr[3] = tcattr[3] & ~termios.TOSTOP
- termios.tcsetattr(fd, termios.TCSANOW, tcattr)
-
- self.command = bb.command.Command(self)
- self.state = state.initial
-
- def parseConfiguration(self):
-
- # Change nice level if we're asked to
- nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
- if nice:
- curnice = os.nice(0)
- nice = int(nice) - curnice
- buildlog.verbose("Renice to %s " % os.nice(nice))
-
- def parseCommandLine(self):
- # Parse any commandline into actions
- if self.configuration.show_environment:
- self.commandlineAction = None
-
- if 'world' in self.configuration.pkgs_to_build:
- buildlog.error("'world' is not a valid target for --environment.")
- elif len(self.configuration.pkgs_to_build) > 1:
- buildlog.error("Only one target can be used with the --environment option.")
- elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
- buildlog.error("No target should be used with the --environment and --buildfile options.")
- elif len(self.configuration.pkgs_to_build) > 0:
- self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
- else:
- self.commandlineAction = ["showEnvironment", self.configuration.buildfile]
- elif self.configuration.buildfile is not None:
- self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd]
- elif self.configuration.revisions_changed:
- self.commandlineAction = ["compareRevisions"]
- elif self.configuration.show_versions:
- self.commandlineAction = ["showVersions"]
- elif self.configuration.parse_only:
- self.commandlineAction = ["parseFiles"]
- elif self.configuration.dot_graph:
- if self.configuration.pkgs_to_build:
- self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
- else:
- self.commandlineAction = None
- buildlog.error("Please specify a package name for dependency graph generation.")
- else:
- if self.configuration.pkgs_to_build:
- self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
- else:
- self.commandlineAction = None
- buildlog.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
-
- def runCommands(self, server, data, abort):
- """
- Run any queued asynchronous command
-        This is done by the idle handler so it runs in the server's own
-        context rather than being tied to any particular UI.
- """
-
- return self.command.runAsyncCommand()
-
- def showVersions(self):
-
- # Need files parsed
- self.updateCache()
-
- pkg_pn = self.status.pkg_pn
- preferred_versions = {}
- latest_versions = {}
-
- # Sort by priority
- for pn in pkg_pn:
- (last_ver, last_file, pref_ver, pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status)
- preferred_versions[pn] = (pref_ver, pref_file)
- latest_versions[pn] = (last_ver, last_file)
-
- logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
- logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")
-
- for p in sorted(pkg_pn):
- pref = preferred_versions[p]
- latest = latest_versions[p]
-
- prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
- lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
-
- if pref == latest:
- prefstr = ""
-
- logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
-
- def showEnvironment(self, buildfile = None, pkgs_to_build = []):
- """
- Show the outer or per-package environment
- """
- fn = None
- envdata = None
-
- if buildfile:
- fn = self.matchFile(buildfile)
- elif len(pkgs_to_build) == 1:
- self.updateCache()
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
-
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
- taskdata.add_provider(localdata, self.status, pkgs_to_build[0])
- taskdata.add_unresolved(localdata, self.status)
-
- targetid = taskdata.getbuild_id(pkgs_to_build[0])
- fnid = taskdata.build_targets[targetid][0]
- fn = taskdata.fn_index[fnid]
- else:
- envdata = self.configuration.data
-
- if fn:
- try:
- envdata = bb.cache.Cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
- except Exception, e:
- parselog.exception("Unable to read %s", fn)
- raise
-
- # emit variables and shell functions
- data.update_data(envdata)
- with closing(StringIO()) as env:
- data.emit_env(env, envdata, True)
- logger.plain(env.getvalue())
-
-        # emit the metadata which isn't valid shell
- data.expandKeys(envdata)
- for e in envdata.keys():
- if data.getVarFlag( e, 'python', envdata ):
- logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
-
- def generateDepTreeData(self, pkgs_to_build, task):
- """
- Create a dependency tree of pkgs_to_build, returning the data.
- """
-
- # Need files parsed
- self.updateCache()
-
- # If we are told to do the None task then query the default task
- if (task == None):
- task = self.configuration.cmd
-
- pkgs_to_build = self.checkPackages(pkgs_to_build)
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
-
- runlist = []
- for k in pkgs_to_build:
- taskdata.add_provider(localdata, self.status, k)
- runlist.append([k, "do_%s" % task])
- taskdata.add_unresolved(localdata, self.status)
-
- rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
- rq.rqdata.prepare()
-
- seen_fnids = []
- depend_tree = {}
- depend_tree["depends"] = {}
- depend_tree["tdepends"] = {}
- depend_tree["pn"] = {}
- depend_tree["rdepends-pn"] = {}
- depend_tree["packages"] = {}
- depend_tree["rdepends-pkg"] = {}
- depend_tree["rrecs-pkg"] = {}
-
- for task in xrange(len(rq.rqdata.runq_fnid)):
- taskname = rq.rqdata.runq_task[task]
- fnid = rq.rqdata.runq_fnid[task]
- fn = taskdata.fn_index[fnid]
- pn = self.status.pkg_fn[fn]
- version = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
- if pn not in depend_tree["pn"]:
- depend_tree["pn"][pn] = {}
- depend_tree["pn"][pn]["filename"] = fn
- depend_tree["pn"][pn]["version"] = version
- for dep in rq.rqdata.runq_depends[task]:
- depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
- deppn = self.status.pkg_fn[depfn]
- dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
- if not dotname in depend_tree["tdepends"]:
- depend_tree["tdepends"][dotname] = []
- depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
- if fnid not in seen_fnids:
- seen_fnids.append(fnid)
- packages = []
-
- depend_tree["depends"][pn] = []
- for dep in taskdata.depids[fnid]:
- depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
-
- depend_tree["rdepends-pn"][pn] = []
- for rdep in taskdata.rdepids[fnid]:
- depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
-
- rdepends = self.status.rundeps[fn]
- for package in rdepends:
- depend_tree["rdepends-pkg"][package] = []
- for rdepend in rdepends[package]:
- depend_tree["rdepends-pkg"][package].append(rdepend)
- packages.append(package)
-
- rrecs = self.status.runrecs[fn]
- for package in rrecs:
- depend_tree["rrecs-pkg"][package] = []
- for rdepend in rrecs[package]:
- depend_tree["rrecs-pkg"][package].append(rdepend)
- if not package in packages:
- packages.append(package)
-
- for package in packages:
- if package not in depend_tree["packages"]:
- depend_tree["packages"][package] = {}
- depend_tree["packages"][package]["pn"] = pn
- depend_tree["packages"][package]["filename"] = fn
- depend_tree["packages"][package]["version"] = version
-
- return depend_tree
-
-
- def generateDepTreeEvent(self, pkgs_to_build, task):
- """
- Create a task dependency graph of pkgs_to_build.
- Generate an event with the result
- """
- depgraph = self.generateDepTreeData(pkgs_to_build, task)
- bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.configuration.data)
-
- def generateDotGraphFiles(self, pkgs_to_build, task):
- """
- Create a task dependency graph of pkgs_to_build.
- Save the result to a set of .dot files.
- """
-
- depgraph = self.generateDepTreeData(pkgs_to_build, task)
-
-        # Print a flattened form of the package-depends graph, with subpackages of a package merged into the main pn
- depends_file = file('pn-depends.dot', 'w' )
- print("digraph depends {", file=depends_file)
- for pn in depgraph["pn"]:
- fn = depgraph["pn"][pn]["filename"]
- version = depgraph["pn"][pn]["version"]
- print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
- for pn in depgraph["depends"]:
- for depend in depgraph["depends"][pn]:
- print('"%s" -> "%s"' % (pn, depend), file=depends_file)
- for pn in depgraph["rdepends-pn"]:
- for rdepend in depgraph["rdepends-pn"][pn]:
- print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
- print("}", file=depends_file)
- logger.info("PN dependencies saved to 'pn-depends.dot'")
-
- depends_file = file('package-depends.dot', 'w' )
- print("digraph depends {", file=depends_file)
- for package in depgraph["packages"]:
- pn = depgraph["packages"][package]["pn"]
- fn = depgraph["packages"][package]["filename"]
- version = depgraph["packages"][package]["version"]
- if package == pn:
- print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
- else:
- print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
- for depend in depgraph["depends"][pn]:
- print('"%s" -> "%s"' % (package, depend), file=depends_file)
- for package in depgraph["rdepends-pkg"]:
- for rdepend in depgraph["rdepends-pkg"][package]:
- print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
- for package in depgraph["rrecs-pkg"]:
- for rdepend in depgraph["rrecs-pkg"][package]:
- print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
- print("}", file=depends_file)
- logger.info("Package dependencies saved to 'package-depends.dot'")
-
- tdepends_file = file('task-depends.dot', 'w' )
- print("digraph depends {", file=tdepends_file)
- for task in depgraph["tdepends"]:
- (pn, taskname) = task.rsplit(".", 1)
- fn = depgraph["pn"][pn]["filename"]
- version = depgraph["pn"][pn]["version"]
- print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
- for dep in depgraph["tdepends"][task]:
- print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
- print("}", file=tdepends_file)
- logger.info("Task dependencies saved to 'task-depends.dot'")
-
- def buildDepgraph( self ):
- all_depends = self.status.all_depends
- pn_provides = self.status.pn_provides
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
-
- matched = set()
- def calc_bbfile_priority(filename):
- for _, _, regex, pri in self.status.bbfile_config_priorities:
- if regex.match(filename):
- if not regex in matched:
- matched.add(regex)
- return pri
- return 0
-
- # Handle PREFERRED_PROVIDERS
- for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
- try:
- (providee, provider) = p.split(':')
- except:
- providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
- continue
- if providee in self.status.preferred and self.status.preferred[providee] != provider:
- providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.status.preferred[providee])
- self.status.preferred[providee] = provider
-
- # Calculate priorities for each file
- for p in self.status.pkg_fn:
- self.status.bbfile_priority[p] = calc_bbfile_priority(p)
-
- for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
- if not regex in matched:
- collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
-
- def buildWorldTargetList(self):
- """
- Build package list for "bitbake world"
- """
- all_depends = self.status.all_depends
- pn_provides = self.status.pn_provides
- parselog.debug(1, "collating packages for \"world\"")
- for f in self.status.possible_world:
- terminal = True
- pn = self.status.pkg_fn[f]
-
- for p in pn_provides[pn]:
- if p.startswith('virtual/'):
- parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
- terminal = False
- break
- for pf in self.status.providers[p]:
- if self.status.pkg_fn[pf] != pn:
- parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
- terminal = False
- break
- if terminal:
- self.status.world_target.add(pn)
-
- # drop reference count now
- self.status.possible_world = None
- self.status.all_depends = None
-
- def interactiveMode( self ):
- """Drop off into a shell"""
- try:
- from bb import shell
- except ImportError:
- parselog.exception("Interactive mode not available")
- sys.exit(1)
- else:
- shell.start( self )
-
- def _findLayerConf(self):
- path = os.getcwd()
- while path != "/":
- bblayers = os.path.join(path, "conf", "bblayers.conf")
- if os.path.exists(bblayers):
- return bblayers
-
- path, _ = os.path.split(path)
-
- def parseConfigurationFiles(self, files):
- def _parse(f, data, include=False):
- try:
- return bb.parse.handle(f, data, include)
- except (IOError, bb.parse.ParseError) as exc:
- parselog.critical("Unable to parse %s: %s" % (f, exc))
- sys.exit(1)
-
- data = self.configuration.data
- bb.parse.init_parser(data)
- for f in files:
- data = _parse(f, data)
-
- layerconf = self._findLayerConf()
- if layerconf:
- parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
- data = _parse(layerconf, data)
-
- layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
-
- data = bb.data.createCopy(data)
- for layer in layers:
- parselog.debug(2, "Adding layer %s", layer)
- bb.data.setVar('LAYERDIR', layer, data)
- data = _parse(os.path.join(layer, "conf", "layer.conf"), data)
- data.expandVarref('LAYERDIR')
-
- bb.data.delVar('LAYERDIR', data)
-
- if not data.getVar("BBPATH", True):
- raise SystemExit("The BBPATH variable is not set")
-
- data = _parse(os.path.join("conf", "bitbake.conf"), data)
-
- self.configuration.data = data
-
- # Handle any INHERITs and inherit the base class
- inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
- for inherit in inherits:
- self.configuration.data = _parse(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True )
-
- # Normally we only register event handlers at the end of parsing .bb files
- # We register any handlers we've found so far here...
- for var in bb.data.getVar('__BBHANDLERS', self.configuration.data) or []:
- bb.event.register(var, bb.data.getVar(var, self.configuration.data))
-
- if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
- bb.fetch.fetcher_init(self.configuration.data)
- bb.codeparser.parser_cache_init(self.configuration.data)
- bb.parse.init_parser(data)
- bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
-
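The LAYERDIR sequence above (setVar, parse the layer's conf, expandVarref, delVar) freezes every ${LAYERDIR} reference into a concrete path before the variable disappears again. A standalone sketch of that freeze step, using a plain dict in place of the BitBake datastore (paths are made up):

    def freeze_varref(store, name):
        # Replace literal '${name}' references with the variable's current
        # value, so later changes to (or deletion of) the variable no
        # longer affect them.
        ref = '${%s}' % name
        value = store[name]
        for key, val in list(store.items()):
            if isinstance(val, str) and ref in val:
                store[key] = val.replace(ref, value)

    store = {'LAYERDIR': '/srv/layers/meta-foo',
             'BBFILES': '${LAYERDIR}/recipes-*/*/*.bb'}
    freeze_varref(store, 'LAYERDIR')
    del store['LAYERDIR']
    print(store['BBFILES'])   # /srv/layers/meta-foo/recipes-*/*/*.bb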
- def handleCollections( self, collections ):
- """Handle collections"""
- if collections:
- collection_list = collections.split()
- for c in collection_list:
- regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
- if regex is None:
- parselog.error("BBFILE_PATTERN_%s not defined" % c)
- continue
- priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
- if priority is None:
- parselog.error("BBFILE_PRIORITY_%s not defined" % c)
- continue
- try:
- cre = re.compile(regex)
- except re.error:
- parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
- continue
- try:
- pri = int(priority)
- self.status.bbfile_config_priorities.append((c, regex, cre, pri))
- except ValueError:
- parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
-
- def buildSetVars(self):
- """
- Set up any variables needed before starting a build
- """
- if not bb.data.getVar("BUILDNAME", self.configuration.data):
- bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data)
- bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data)
-
- def matchFiles(self, buildfile):
- """
- Find the .bb files which match the expression in 'buildfile'.
- """
-
- bf = os.path.abspath(buildfile)
- filelist, masked = self.collect_bbfiles()
- try:
- os.stat(bf)
- return [bf]
- except OSError:
- regexp = re.compile(buildfile)
- matches = []
- for f in filelist:
- if regexp.search(f) and os.path.isfile(f):
- bf = f
- matches.append(f)
- return matches
-
- def matchFile(self, buildfile):
- """
- Find the .bb file which matches the expression in 'buildfile'.
- Raise an error if multiple files match.
- """
- matches = self.matchFiles(buildfile)
- if len(matches) != 1:
- parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
- for f in matches:
- parselog.error(" %s" % f)
- raise MultipleMatches
- return matches[0]
-
- def buildFile(self, buildfile, task):
- """
- Build the file matching regexp buildfile
- """
-
- # Parse the configuration here. We need to do it explicitly here since
- # buildFile() doesn't use the cache
- self.parseConfiguration()
-
- # If no task was specified, fall back to the configured default task
- if task is None:
- task = self.configuration.cmd
-
- (fn, cls) = bb.cache.Cache.virtualfn2realfn(buildfile)
- buildfile = self.matchFile(fn)
- fn = bb.cache.Cache.realfn2virtual(buildfile, cls)
-
- self.buildSetVars()
-
- self.status = bb.cache.CacheData()
- infos = bb.cache.Cache.parse(fn, self.get_file_appends(fn), \
- self.configuration.data)
- maininfo = None
- for vfn, info in infos:
- self.status.add_from_recipeinfo(vfn, info)
- if vfn == fn:
- maininfo = info
-
- # Tweak some variables
- item = maininfo.pn
- self.status.ignored_dependencies = set()
- self.status.bbfile_priority[fn] = 1
-
- # Remove external dependencies
- self.status.task_deps[fn]['depends'] = {}
- self.status.deps[fn] = []
- self.status.rundeps[fn] = []
- self.status.runrecs[fn] = []
-
- # Remove stamp for target if force mode active
- if self.configuration.force:
- logger.verbose("Remove stamp %s, %s", task, fn)
- bb.build.del_stamp('do_%s' % task, self.status, fn)
-
- # Setup taskdata structure
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
- taskdata.add_provider(self.configuration.data, self.status, item)
-
- buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
- bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)
-
- # Clear locks
- bb.fetch.persistent_database_connection = {}
-
- # Execute the runqueue
- runlist = [[item, "do_%s" % task]]
-
- rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
-
- def buildFileIdle(server, rq, abort):
-
- if abort or self.state == state.stop:
- rq.finish_runqueue(True)
- elif self.state == state.shutdown:
- rq.finish_runqueue(False)
- failures = 0
- try:
- retval = rq.execute_runqueue()
- except runqueue.TaskFailure as exc:
- for fnid in exc.args:
- buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
- failures += len(exc.args)
- retval = False
- if not retval:
- bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
- self.command.finishAsyncCommand()
- return False
- if retval is True:
- return True
- return retval
-
- self.server.register_idle_function(buildFileIdle, rq)
-
- def buildTargets(self, targets, task):
- """
- Attempt to build the targets specified
- """
-
- # Need files parsed
- self.updateCache()
-
- # If no task was specified, fall back to the configured default task
- if task is None:
- task = self.configuration.cmd
-
- targets = self.checkPackages(targets)
-
- def buildTargetsIdle(server, rq, abort):
- if abort or self.state == state.stop:
- rq.finish_runqueue(True)
- elif self.state == state.shutdown:
- rq.finish_runqueue(False)
- failures = 0
- try:
- retval = rq.execute_runqueue()
- except runqueue.TaskFailure as exc:
- for fnid in exc.args:
- buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
- failures += len(exc.args)
- retval = False
- if not retval:
- bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
- self.command.finishAsyncCommand()
- return False
- if retval is True:
- return True
- return retval
-
- self.buildSetVars()
-
- buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
- bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
-
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
-
- runlist = []
- for k in targets:
- taskdata.add_provider(localdata, self.status, k)
- runlist.append([k, "do_%s" % task])
- taskdata.add_unresolved(localdata, self.status)
-
- # Clear locks
- bb.fetch.persistent_database_connection = {}
-
- rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
-
- self.server.register_idle_function(buildTargetsIdle, rq)
-
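buildFileIdle and buildTargetsIdle both follow the same server contract: the registered function is polled repeatedly, returning True for "call me again" and False once the run queue has finished. A minimal standalone sketch of such an idle loop (the real server also passes server, rq and abort arguments):

    def run_idle_loop(idle_fns):
        # Keep polling each registered function until all report completion.
        pending = list(idle_fns)
        while pending:
            pending = [fn for fn in pending if fn()]

    steps = iter(range(3))
    def tick():
        try:
            print("step %d" % next(steps))
            return True       # more work to do, poll again
        except StopIteration:
            return False      # finished, deregister
    run_idle_loop([tick])     # prints step 0, step 1, step 2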
- def updateCache(self):
- if self.state == state.running:
- return
-
- if self.state != state.parsing:
- self.parseConfiguration ()
-
- # Import Psyco if available and not disabled
- import platform
- if platform.machine() in ['i386', 'i486', 'i586', 'i686']:
- if not self.configuration.disable_psyco:
- try:
- import psyco
- except ImportError:
- collectlog.info("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
- else:
- psyco.bind( CookerParser.parse_next )
- else:
- collectlog.info("You have disabled Psyco. This decreases performance.")
-
- self.status = bb.cache.CacheData()
-
- ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
- self.status.ignored_dependencies = set(ignore.split())
-
- for dep in self.configuration.extra_assume_provided:
- self.status.ignored_dependencies.add(dep)
-
- self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
-
- (filelist, masked) = self.collect_bbfiles()
- bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
-
- self.parser = CookerParser(self, filelist, masked)
- self.state = state.parsing
-
- if not self.parser.parse_next():
- collectlog.debug(1, "parsing complete")
- self.buildDepgraph()
- self.state = state.running
- return None
-
- return True
-
- def checkPackages(self, pkgs_to_build):
-
- if len(pkgs_to_build) == 0:
- raise NothingToBuild
-
- if 'world' in pkgs_to_build:
- self.buildWorldTargetList()
- pkgs_to_build.remove('world')
- for t in self.status.world_target:
- pkgs_to_build.append(t)
-
- return pkgs_to_build
-
- def get_bbfiles(self, path = None):
- """Get list of default .bb files by reading the given directory (defaults to the current directory)"""
- if path is None:
- path = os.getcwd()
- contents = os.listdir(path)
- bbfiles = []
- for f in contents:
- (root, ext) = os.path.splitext(f)
- if ext == ".bb":
- bbfiles.append(os.path.abspath(os.path.join(path, f)))
- return bbfiles
-
- def find_bbfiles( self, path ):
- """Find all the .bb and .bbappend files in a directory"""
- from os.path import join
-
- found = []
- for dir, dirs, files in os.walk(path):
- for ignored in ('SCCS', 'CVS', '.svn'):
- if ignored in dirs:
- dirs.remove(ignored)
- found += [join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]
-
- return found
-
- def collect_bbfiles( self ):
- """Collect all available .bb build files"""
- parsed, cached, skipped, masked = 0, 0, 0, 0
-
- collectlog.debug(1, "collecting .bb files")
-
- files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
- data.setVar("BBFILES", " ".join(files), self.configuration.data)
-
- if not len(files):
- files = self.get_bbfiles()
-
- if not len(files):
- collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
- bb.event.fire(CookerExit(), self.configuration.event_data)
-
- newfiles = set()
- for f in files:
- if os.path.isdir(f):
- dirfiles = self.find_bbfiles(f)
- newfiles.update(dirfiles)
- else:
- globbed = glob.glob(f)
- if not globbed and os.path.exists(f):
- globbed = [f]
- newfiles.update(globbed)
-
- bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)
-
- if bbmask:
- try:
- bbmask_compiled = re.compile(bbmask)
- except sre_constants.error:
- collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
- bbmask = None
-
- bbfiles = []
- bbappend = []
- for f in newfiles:
- if bbmask and bbmask_compiled.search(f):
- collectlog.debug(1, "skipping masked file %s", f)
- masked += 1
- continue
- if f.endswith('.bb'):
- bbfiles.append(f)
- elif f.endswith('.bbappend'):
- bbappend.append(f)
- else:
- collectlog.debug(1, "skipping %s: unknown file extension", f)
-
- # Build a list of .bbappend files for each .bb file
- for f in bbappend:
- base = os.path.basename(f).replace('.bbappend', '.bb')
- if not base in self.appendlist:
- self.appendlist[base] = []
- self.appendlist[base].append(f)
-
- return (bbfiles, masked)
-
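The bookkeeping at the end of collect_bbfiles matches .bbappend files to recipes purely by base file name. A standalone sketch of that mapping (paths are made up):

    import os

    def map_appends(files):
        appendlist = {}
        for f in files:
            if f.endswith('.bbappend'):
                base = os.path.basename(f).replace('.bbappend', '.bb')
                appendlist.setdefault(base, []).append(f)
        return appendlist

    print(map_appends(['/layer-a/busybox_1.18.bbappend',
                       '/layer-b/busybox_1.18.bb']))
    # {'busybox_1.18.bb': ['/layer-a/busybox_1.18.bbappend']}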
- def get_file_appends(self, fn):
- """
- Returns a list of .bbappend files to apply to fn
- NB: collect_bbfiles() must have been called prior to this
- """
- f = os.path.basename(fn)
- if f in self.appendlist:
- return self.appendlist[f]
- return []
-
- def pre_serve(self):
- # Empty the environment. The environment will be populated as
- # necessary from the data store.
- #bb.utils.empty_environment()
- return
-
- def post_serve(self):
- bb.event.fire(CookerExit(), self.configuration.event_data)
-
- def shutdown(self):
- self.state = state.shutdown
-
- def stop(self):
- self.state = state.stop
-
-def server_main(cooker, func, *args):
- cooker.pre_serve()
-
- if cooker.configuration.profile:
- try:
- import cProfile as profile
- except:
- import profile
- prof = profile.Profile()
-
- ret = profile.Profile.runcall(prof, func, *args)
-
- prof.dump_stats("profile.log")
-
- # Redirect stdout to capture profile information
- pout = open('profile.log.processed', 'w')
- so = sys.stdout.fileno()
- orig_so = os.dup(sys.stdout.fileno())
- os.dup2(pout.fileno(), so)
-
- import pstats
- p = pstats.Stats('profile.log')
- p.sort_stats('time')
- p.print_stats()
- p.print_callers()
- p.sort_stats('cumulative')
- p.print_stats()
-
- os.dup2(orig_so, so)
- pout.flush()
- pout.close()
-
- print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
-
- else:
- ret = func(*args)
-
- cooker.post_serve()
-
- return ret
-
-class CookerExit(bb.event.Event):
- """
- Notify clients of the Cooker shutdown
- """
-
- def __init__(self):
- bb.event.Event.__init__(self)
-
-class ParsingFailure(Exception):
- def __init__(self, realexception, recipe):
- self.realexception = realexception
- self.recipe = recipe
- Exception.__init__(self, "Failure when parsing %s" % recipe)
- self.args = (realexception, recipe)
-
-def parse_file(task):
- filename, appends = task
- try:
- return True, bb.cache.Cache.parse(filename, appends, parse_file.cfg)
- except Exception as exc:
- exc.recipe = filename
- raise exc
- # Need to turn BaseExceptions into Exceptions here so we gracefully shut down
- # and, for example, a worker thread doesn't just exit on its own in response
- # to a SystemExit event.
- except BaseException as exc:
- raise ParsingFailure(exc, filename)
-
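The comment inside parse_file is the important detail: a multiprocessing worker converts BaseExceptions such as SystemExit into ordinary exceptions, so the parent pool sees a reportable failure instead of a silently dying worker. A standalone sketch of the pattern:

    def do_work(task):
        if task == "bad":
            raise SystemExit(1)   # simulates a stray sys.exit() during parsing
        return task.upper()

    def worker(task):
        try:
            return do_work(task)
        except Exception:
            raise                 # ordinary errors propagate unchanged
        except BaseException as exc:
            # Re-wrap SystemExit and friends so a pool's imap() surfaces
            # an ordinary failure instead of losing the worker process.
            raise RuntimeError("worker aborted: %r" % exc)

    print(worker("ok"))           # OK
    try:
        worker("bad")
    except RuntimeError as exc:
        print(exc)                # worker aborted: SystemExit(...)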
-class CookerParser(object):
- def __init__(self, cooker, filelist, masked):
- self.filelist = filelist
- self.cooker = cooker
- self.cfgdata = cooker.configuration.data
-
- # Accounting statistics
- self.parsed = 0
- self.cached = 0
- self.error = 0
- self.masked = masked
-
- self.skipped = 0
- self.virtuals = 0
- self.total = len(filelist)
-
- self.current = 0
- self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
- multiprocessing.cpu_count())
-
- self.bb_cache = bb.cache.Cache(self.cfgdata)
- self.fromcache = []
- self.willparse = []
- for filename in self.filelist:
- appends = self.cooker.get_file_appends(filename)
- if not self.bb_cache.cacheValid(filename):
- self.willparse.append((filename, appends))
- else:
- self.fromcache.append((filename, appends))
- self.toparse = self.total - len(self.fromcache)
- self.progress_chunk = max(self.toparse / 100, 1)
-
- self.start()
-
- def start(self):
- def init(cfg):
- signal.signal(signal.SIGINT, signal.SIG_IGN)
- parse_file.cfg = cfg
-
- bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
-
- self.pool = multiprocessing.Pool(self.num_processes, init, [self.cfgdata])
- parsed = self.pool.imap(parse_file, self.willparse)
- self.pool.close()
-
- self.results = itertools.chain(self.load_cached(), parsed)
-
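The init hook passed to the pool above is a standard multiprocessing idiom: workers ignore SIGINT so Ctrl-C is delivered only to the parent, which can then tear the pool down cleanly. A minimal standalone sketch of the same pattern (nothing bb-specific):

    import multiprocessing
    import signal

    def init_worker():
        # Workers ignore SIGINT; the parent catches KeyboardInterrupt
        # and shuts the pool down via terminate()/join().
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def square(n):
        return n * n

    if __name__ == '__main__':
        pool = multiprocessing.Pool(2, init_worker)
        try:
            print(list(pool.imap(square, range(5))))   # [0, 1, 4, 9, 16]
            pool.close()
        except KeyboardInterrupt:
            pool.terminate()
        finally:
            pool.join()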
- def shutdown(self, clean=True):
- if clean:
- event = bb.event.ParseCompleted(self.cached, self.parsed,
- self.skipped, self.masked,
- self.virtuals, self.error,
- self.total)
- bb.event.fire(event, self.cfgdata)
- else:
- self.pool.terminate()
- self.pool.join()
-
- sync = threading.Thread(target=self.bb_cache.sync)
- sync.start()
- atexit.register(lambda: sync.join())
-
- codesync = threading.Thread(target=bb.codeparser.parser_cache_save, args=(self.cooker.configuration.data,))
- codesync.start()
- atexit.register(lambda: codesync.join())
-
- def load_cached(self):
- for filename, appends in self.fromcache:
- cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
- yield not cached, infos
-
- def parse_next(self):
- try:
- parsed, result = self.results.next()
- except StopIteration:
- self.shutdown()
- return False
- except KeyboardInterrupt:
- self.shutdown(clean=False)
- raise
- except Exception as exc:
- self.shutdown(clean=False)
- bb.fatal('Error parsing %s: %s' % (exc.recipe, exc))
-
- self.current += 1
- self.virtuals += len(result)
- if parsed:
- self.parsed += 1
- if self.parsed % self.progress_chunk == 0:
- bb.event.fire(bb.event.ParseProgress(self.parsed),
- self.cfgdata)
- else:
- self.cached += 1
-
- for virtualfn, info in result:
- if info.skipped:
- self.skipped += 1
- else:
- self.bb_cache.add_info(virtualfn, info, self.cooker.status,
- parsed=parsed)
- return True
-
- def reparse(self, filename):
- infos = self.bb_cache.parse(filename,
- self.cooker.get_file_appends(filename),
- self.cfgdata)
- for vfn, info in infos:
- self.cooker.status.add_from_recipeinfo(vfn, info)
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
deleted file mode 100644
index f0714b3af6..0000000000
--- a/bitbake/lib/bb/daemonize.py
+++ /dev/null
@@ -1,190 +0,0 @@
-"""
-Python daemonizing helper
-
-Configurable daemon behaviors:
-
- 1.) The current working directory is set to the "/" directory.
- 2.) The file creation mode mask is set to 0.
- 3.) All open file descriptors (up to MAXFD, default 1024) are closed.
- 4.) Standard I/O streams are redirected to "/dev/null".
-
-A failed call to fork() now raises an exception.
-
-References:
- 1) Advanced Programming in the Unix Environment: W. Richard Stevens
- 2) Unix Programming Frequently Asked Questions:
- http://www.erlenstar.demon.co.uk/unix/faq_toc.html
-
-Modified by Richard Purdie for BitBake's use, to allow a function to be
-daemonized while returning control to the caller.
-"""
-
-__author__ = "Chad J. Schroeder"
-__copyright__ = "Copyright (C) 2005 Chad J. Schroeder"
-__version__ = "0.2"
-
-# Standard Python modules.
-import os # Miscellaneous OS interfaces.
-import sys # System-specific parameters and functions.
-
-# Default daemon parameters.
-# File mode creation mask of the daemon.
-# For BitBake's children, we do want to inherit the parent umask.
-UMASK = None
-
-# Default maximum for the number of available file descriptors.
-MAXFD = 1024
-
-# The standard I/O file descriptors are redirected to /dev/null by default.
-if (hasattr(os, "devnull")):
- REDIRECT_TO = os.devnull
-else:
- REDIRECT_TO = "/dev/null"
-
-def createDaemon(function, logfile):
- """
- Detach a process from the controlling terminal and run it in the
- background as a daemon, returning control to the caller.
- """
-
- try:
- # Fork a child process so the parent can exit. This returns control to
- # the command-line or shell. It also guarantees that the child will not
- # be a process group leader, since the child receives a new process ID
- # and inherits the parent's process group ID. This step is required
- # to ensure that the next call to os.setsid is successful.
- pid = os.fork()
- except OSError as e:
- raise Exception("%s [%d]" % (e.strerror, e.errno))
-
- if (pid == 0): # The first child.
- # To become the session leader of this new session and the process group
- # leader of the new process group, we call os.setsid(). The process is
- # also guaranteed not to have a controlling terminal.
- os.setsid()
-
- # Is ignoring SIGHUP necessary?
- #
- # It's often suggested that the SIGHUP signal should be ignored before
- # the second fork to avoid premature termination of the process. The
- # reason is that when the first child terminates, all processes, e.g.
- # the second child, in the orphaned group will be sent a SIGHUP.
- #
- # "However, as part of the session management system, there are exactly
- # two cases where SIGHUP is sent on the death of a process:
- #
- # 1) When the process that dies is the session leader of a session that
- # is attached to a terminal device, SIGHUP is sent to all processes
- # in the foreground process group of that terminal device.
- # 2) When the death of a process causes a process group to become
- # orphaned, and one or more processes in the orphaned group are
- # stopped, then SIGHUP and SIGCONT are sent to all members of the
- # orphaned group." [2]
- #
- # The first case can be ignored since the child is guaranteed not to have
- # a controlling terminal. The second case isn't so easy to dismiss.
- # The process group is orphaned when the first child terminates and
- # POSIX.1 requires that every STOPPED process in an orphaned process
- # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the
- # second child is not STOPPED though, we can safely forego ignoring the
- # SIGHUP signal. In any case, there are no ill-effects if it is ignored.
- #
- # import signal # Set handlers for asynchronous events.
- # signal.signal(signal.SIGHUP, signal.SIG_IGN)
-
- try:
- # Fork a second child and exit immediately to prevent zombies. This
- # causes the second child process to be orphaned, making the init
- # process responsible for its cleanup. And, since the first child is
- # a session leader without a controlling terminal, it's possible for
- # it to acquire one by opening a terminal in the future (System V-
- # based systems). This second fork guarantees that the child is no
- # longer a session leader, preventing the daemon from ever acquiring
- # a controlling terminal.
- pid = os.fork() # Fork a second child.
- except OSError as e:
- raise Exception("%s [%d]" % (e.strerror, e.errno))
-
- if (pid == 0): # The second child.
- # We probably don't want the file mode creation mask inherited from
- # the parent, so we give the child complete control over permissions.
- if UMASK is not None:
- os.umask(UMASK)
- else:
- # Parent (the first child) of the second child.
- os._exit(0)
- else:
- # exit() or _exit()?
- # _exit is like exit(), but it doesn't call any functions registered
- # with atexit (and on_exit) or any registered signal handlers. It also
- # closes any open file descriptors. Using exit() may cause all stdio
- # streams to be flushed twice and any temporary files may be unexpectedly
- # removed. It's therefore recommended that child branches of a fork()
- # and the parent branch(es) of a daemon use _exit().
- return
-
- # Close all open file descriptors. This prevents the child from keeping
- # open any file descriptors inherited from the parent. There is a variety
- # of methods to accomplish this task. Three are listed below.
- #
- # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
- # number of open file descriptors to close. If it doesn't exist, use
- # the default value (configurable).
- #
- # try:
- # maxfd = os.sysconf("SC_OPEN_MAX")
- # except (AttributeError, ValueError):
- # maxfd = MAXFD
- #
- # OR
- #
- # if (os.sysconf_names.has_key("SC_OPEN_MAX")):
- # maxfd = os.sysconf("SC_OPEN_MAX")
- # else:
- # maxfd = MAXFD
- #
- # OR
- #
- # Use the getrlimit method to retrieve the maximum file descriptor number
- # that can be opened by this process. If there is no limit on the
- # resource, use the default value.
- #
- import resource # Resource usage information.
- maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
- if (maxfd == resource.RLIM_INFINITY):
- maxfd = MAXFD
-
- # Iterate through and close all file descriptors.
-# for fd in range(0, maxfd):
-# try:
-# os.close(fd)
-# except OSError: # ERROR, fd wasn't open to begin with (ignored)
-# pass
-
- # Redirect the standard I/O file descriptors to the specified file. Since
- # the daemon has no controlling terminal, most daemons redirect stdin,
- # stdout, and stderr to /dev/null. This is done to prevent side-effects
- # from reads and writes to the standard I/O file descriptors.
-
- # This call to open is guaranteed to return the lowest file descriptor,
- # which will be 0 (stdin), since it was closed above.
-# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0)
-
- # Duplicate standard input to standard output and standard error.
-# os.dup2(0, 1) # standard output (1)
-# os.dup2(0, 2) # standard error (2)
-
-
- si = open('/dev/null', 'r')
- so = open(logfile, 'w')
- se = so
-
-
- # Replace those fds with our own
- os.dup2(si.fileno(), sys.stdin.fileno())
- os.dup2(so.fileno(), sys.stdout.fileno())
- os.dup2(se.fileno(), sys.stderr.fileno())
-
- function()
-
- os._exit(0)
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
deleted file mode 100644
index 50f2218a70..0000000000
--- a/bitbake/lib/bb/data.py
+++ /dev/null
@@ -1,338 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Data' implementations
-
-Functions for interacting with the data structure used by the
-BitBake build tools.
-
-The expandData and update_data functions are the most expensive
-operations. At night the cookie monster came by and
-suggested 'give me cookies on setting the variables and
-things will work out'. Taking this suggestion into account
-and applying the skills from the not yet passed 'Entwurf und
-Analyse von Algorithmen' (design and analysis of algorithms)
-lecture, the cookie monster seems to be right. We will track
-setVar more carefully to have faster update_data and
-expandKeys operations.
-
-This is a trade-off between speed and memory again, but
-speed is the more critical here.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2005 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import sys, os, re
-if sys.argv[0][-5:] == "pydoc":
- path = os.path.dirname(os.path.dirname(sys.argv[1]))
-else:
- path = os.path.dirname(os.path.dirname(sys.argv[0]))
-sys.path.insert(0, path)
-from itertools import groupby
-
-from bb import data_smart
-from bb import codeparser
-import bb
-
-_dict_type = data_smart.DataSmart
-
-def init():
- """Return a new object representing the Bitbake data"""
- return _dict_type()
-
-def init_db(parent = None):
- """Return a new object representing the Bitbake data,
- optionally based on an existing object"""
- if parent:
- return parent.createCopy()
- else:
- return _dict_type()
-
-def createCopy(source):
- """Link the source set to the destination
- If one does not find the value in the destination set,
- search will go on to the source set to get the value.
- Value from source are copy-on-write. i.e. any try to
- modify one of them will end up putting the modified value
- in the destination set.
- """
- return source.createCopy()
-
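A standalone sketch of the copy-on-write chaining this describes; the real mechanism is DataSmart.createCopy further down, which links the child to its parent through a '_data' entry rather than an attribute:

    class COWStore(object):
        def __init__(self, parent=None):
            self.local = {}
            self.parent = parent

        def get(self, key):
            store = self
            while store is not None:        # walk up the parent chain
                if key in store.local:
                    return store.local[key]
                store = store.parent
            return None

        def set(self, key, value):
            self.local[key] = value         # writes never touch the parent

    parent = COWStore()
    parent.set('FOO', 'bar')
    child = COWStore(parent)
    print(child.get('FOO'))    # 'bar', found via the parent
    child.set('FOO', 'baz')    # modified value lands in the child only
    print(parent.get('FOO'))   # still 'bar'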
-def initVar(var, d):
- """Non-destructive var init for data structure"""
- d.initVar(var)
-
-
-def setVar(var, value, d):
- """Set a variable to a given value"""
- d.setVar(var, value)
-
-
-def getVar(var, d, exp = 0):
- """Gets the value of a variable"""
- return d.getVar(var, exp)
-
-
-def renameVar(key, newkey, d):
- """Renames a variable from key to newkey"""
- d.renameVar(key, newkey)
-
-def delVar(var, d):
- """Removes a variable from the data set"""
- d.delVar(var)
-
-def setVarFlag(var, flag, flagvalue, d):
- """Set a flag for a given variable to a given value"""
- d.setVarFlag(var, flag, flagvalue)
-
-def getVarFlag(var, flag, d):
- """Gets given flag from given var"""
- return d.getVarFlag(var, flag)
-
-def delVarFlag(var, flag, d):
- """Removes a given flag from the variable's flags"""
- d.delVarFlag(var, flag)
-
-def setVarFlags(var, flags, d):
- """Set the flags for a given variable
-
- Note:
- setVarFlags will not clear previous
- flags. Think of this method as
- addVarFlags
- """
- d.setVarFlags(var, flags)
-
-def getVarFlags(var, d):
- """Gets a variable's flags"""
- return d.getVarFlags(var)
-
-def delVarFlags(var, d):
- """Removes a variable's flags"""
- d.delVarFlags(var)
-
-def keys(d):
- """Return a list of keys in d"""
- return d.keys()
-
-
-__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
-def expand(s, d, varname = None):
- """Variable expansion using the data store"""
- return d.expand(s, varname)
-
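The two regular expressions above define the whole expansion grammar: ${VAR} substitutes another variable, ${@expr} evaluates inline Python. A much-simplified standalone sketch (the real DataSmart implementation iterates rather than recursing, and also records references):

    import re

    var_re = re.compile(r"\$\{[^{}]+\}")
    py_re = re.compile(r"\$\{@.+?\}")

    store = {'PN': 'zlib', 'PV': '1.2.5', 'P': '${PN}-${PV}'}

    def expand(s):
        def var_sub(m):
            key = m.group()[2:-1]
            if key.startswith('@') or key not in store:
                return m.group()           # leave for the python pass
            return expand(store[key])      # expand recursively
        s = var_re.sub(var_sub, s)
        s = py_re.sub(lambda m: str(eval(m.group()[3:-1])), s)
        return s

    print(expand('${P}'))                  # zlib-1.2.5
    print(expand('${@ 2 + 3 } files'))     # 5 files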
-def expandKeys(alterdata, readdata = None):
- if readdata is None:
- readdata = alterdata
-
- todolist = {}
- for key in keys(alterdata):
- if not '${' in key:
- continue
-
- ekey = expand(key, readdata)
- if key == ekey:
- continue
- todolist[key] = ekey
-
- # These two for loops are split for performance to maximise the
- # usefulness of the expand cache
-
- for key in todolist:
- ekey = todolist[key]
- renameVar(key, ekey, alterdata)
-
-def inheritFromOS(d):
- """Inherit variables from the environment."""
- exportlist = bb.utils.preserved_envvars_exported()
- for s in os.environ.keys():
- try:
- setVar(s, os.environ[s], d)
- if s in exportlist:
- setVarFlag(s, "export", True, d)
- except TypeError:
- pass
-
-def emit_var(var, o=sys.__stdout__, d = init(), all=False):
- """Emit a variable to be sourced by a shell."""
- if getVarFlag(var, "python", d):
- return 0
-
- export = getVarFlag(var, "export", d)
- unexport = getVarFlag(var, "unexport", d)
- func = getVarFlag(var, "func", d)
- if not all and not export and not unexport and not func:
- return 0
-
- try:
- if all:
- oval = getVar(var, d, 0)
- val = getVar(var, d, 1)
- except (KeyboardInterrupt, bb.build.FuncFailed):
- raise
- except Exception as exc:
- o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
- return 0
-
- if all:
- commentVal = re.sub('\n', '\n#', str(oval))
- o.write('# %s=%s\n' % (var, commentVal))
-
- if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
- return 0
-
- varExpanded = expand(var, d)
-
- if unexport:
- o.write('unset %s\n' % varExpanded)
- return 0
-
- if not val:
- return 0
-
- val = str(val)
-
- if func:
- # NOTE: should probably check for unbalanced {} within the var
- o.write("%s() {\n%s\n}\n" % (varExpanded, val))
- return 1
-
- if export:
- o.write('export ')
-
- # if we're going to output this within doublequotes,
- # to a shell, we need to escape the quotes in the var
- alter = re.sub('"', '\\"', val.strip())
- alter = re.sub('\n', ' \\\n', alter)
- o.write('%s="%s"\n' % (varExpanded, alter))
- return 0
-
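The escaping at the end of emit_var is the delicate step: double quotes and newlines in the value must themselves be escaped before the assignment is wrapped in double quotes for the shell. A standalone sketch of just that step:

    import re
    import sys

    def emit_shell_var(name, value, export=False, out=sys.stdout):
        escaped = re.sub('"', '\\"', value.strip())    # " becomes \"
        escaped = re.sub('\n', ' \\\n', escaped)       # continue long lines
        if export:
            out.write('export ')
        out.write('%s="%s"\n' % (name, escaped))

    emit_shell_var('CFLAGS', '-O2 -g "pipe"', export=True)
    # export CFLAGS="-O2 -g \"pipe\""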
-def emit_env(o=sys.__stdout__, d = init(), all=False):
- """Emits all items in the data store in a format such that it can be sourced by a shell."""
-
- isfunc = lambda key: bool(d.getVarFlag(key, "func"))
- keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
- grouped = groupby(keys, isfunc)
- for isfunc, keys in grouped:
- for key in keys:
- emit_var(key, o, d, all and not isfunc) and o.write('\n')
-
-def export_vars(d):
- keys = (key for key in d.keys() if d.getVarFlag(key, "export"))
- ret = {}
- for k in keys:
- try:
- v = d.getVar(k, True)
- if v:
- ret[k] = v
- except (KeyboardInterrupt, bb.build.FuncFailed):
- raise
- except Exception:
- pass
- return ret
-
-def export_envvars(v, d):
- for s in os.environ.keys():
- if s not in v:
- v[s] = os.environ[s]
- return v
-
-def emit_func(func, o=sys.__stdout__, d = init()):
- """Emits all items in the data store in a format such that it can be sourced by a shell."""
-
- keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
- for key in keys:
- emit_var(key, o, d, False) and o.write('\n')
-
- emit_var(func, o, d, False) and o.write('\n')
- newdeps = bb.codeparser.ShellParser().parse_shell(d.getVar(func, True))
- seen = set()
- while newdeps:
- deps = newdeps
- seen |= deps
- newdeps = set()
- for dep in deps:
- if bb.data.getVarFlag(dep, "func", d):
- emit_var(dep, o, d, False) and o.write('\n')
- newdeps |= bb.codeparser.ShellParser().parse_shell(d.getVar(dep, True))
- newdeps -= seen
-
-def update_data(d):
- """Performs final steps upon the datastore, including application of overrides"""
- d.finalize()
-
-def build_dependencies(key, keys, shelldeps, d):
- deps = set()
- try:
- if d.getVarFlag(key, "func"):
- if d.getVarFlag(key, "python"):
- parsedvar = d.expandWithRefs(d.getVar(key, False), key)
- parser = bb.codeparser.PythonParser()
- parser.parse_python(parsedvar.value)
- deps = deps | parser.references
- else:
- parsedvar = d.expandWithRefs(d.getVar(key, False), key)
- parser = bb.codeparser.ShellParser()
- parser.parse_shell(parsedvar.value)
- deps = deps | shelldeps
- deps = deps | parsedvar.references
- deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
- else:
- parser = d.expandWithRefs(d.getVar(key, False), key)
- deps |= parser.references
- deps = deps | (keys & parser.execs)
- deps |= set((d.getVarFlag(key, "vardeps", True) or "").split())
- deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
- except:
- bb.note("Error expanding variable %s" % key)
- raise
- return deps
- #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
- #d.setVarFlag(key, "vardeps", deps)
-
-def generate_dependencies(d):
-
- keys = set(key for key in d.keys() if not key.startswith("__"))
- shelldeps = set(key for key in keys if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport"))
-
- deps = {}
-
- tasklist = bb.data.getVar('__BBTASKS', d) or []
- for task in tasklist:
- deps[task] = build_dependencies(task, keys, shelldeps, d)
- newdeps = deps[task]
- seen = set()
- while newdeps:
- nextdeps = newdeps
- seen |= nextdeps
- newdeps = set()
- for dep in nextdeps:
- if dep not in deps:
- deps[dep] = build_dependencies(dep, keys, shelldeps, d)
- newdeps |= deps[dep]
- newdeps -= seen
- #print "For %s: %s" % (task, str(taskdeps[task]))
- return tasklist, deps
-
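The inner while loop is a classic worklist computation of the transitive dependency closure. A standalone sketch of the same pattern (the edge data is made up):

    def closure(start, edges):
        # edges maps a node to the set of nodes it depends on.
        seen = set()
        newdeps = set(edges.get(start, ()))
        while newdeps:
            nextdeps = newdeps
            seen |= nextdeps
            newdeps = set()
            for dep in nextdeps:
                newdeps |= set(edges.get(dep, ()))
            newdeps -= seen                # never revisit a node
        return seen

    edges = {'image': {'gcc', 'glibc'}, 'gcc': {'mpfr'}, 'mpfr': {'gmp'}}
    print(sorted(closure('image', edges)))   # ['gcc', 'glibc', 'gmp', 'mpfr']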
-def inherits_class(klass, d):
- val = getVar('__inherit_cache', d) or []
- if os.path.join('classes', '%s.bbclass' % klass) in val:
- return True
- return False
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
deleted file mode 100644
index df9798ad58..0000000000
--- a/bitbake/lib/bb/data_smart.py
+++ /dev/null
@@ -1,428 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Smart Dictionary Implementation
-
-Functions for interacting with the data structure used by the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004, 2005 Seb Frankengul
-# Copyright (C) 2005, 2006 Holger Hans Peter Freyther
-# Copyright (C) 2005 Uli Luckas
-# Copyright (C) 2005 ROAD GmbH
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import copy, re
-from collections import MutableMapping
-import logging
-import bb, bb.codeparser
-from bb import utils
-from bb.COW import COWDictBase
-
-logger = logging.getLogger("BitBake.Data")
-
-__setvar_keyword__ = ["_append", "_prepend"]
-__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
-__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
-
-class VariableParse:
- def __init__(self, varname, d, val = None):
- self.varname = varname
- self.d = d
- self.value = val
-
- self.references = set()
- self.execs = set()
-
- def var_sub(self, match):
- key = match.group()[2:-1]
- if self.varname and key:
- if self.varname == key:
- raise Exception("variable %s references itself!" % self.varname)
- var = self.d.getVar(key, 1)
- if var is not None:
- self.references.add(key)
- return var
- else:
- return match.group()
-
- def python_sub(self, match):
- code = match.group()[3:-1]
- codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
-
- parser = bb.codeparser.PythonParser()
- parser.parse_python(code)
- self.references |= parser.references
- self.execs |= parser.execs
-
- value = utils.better_eval(codeobj, DataContext(self.d))
- return str(value)
-
-
-class DataContext(dict):
- def __init__(self, metadata, **kwargs):
- self.metadata = metadata
- dict.__init__(self, **kwargs)
- self['d'] = metadata
-
- def __missing__(self, key):
- value = self.metadata.getVar(key, True)
- if value is None or self.metadata.getVarFlag(key, 'func'):
- raise KeyError(key)
- else:
- return value
-
-class ExpansionError(Exception):
- def __init__(self, varname, expression, exception):
- self.expression = expression
- self.variablename = varname
- self.exception = exception
- self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
- Exception.__init__(self, self.msg)
- self.args = (varname, expression, exception)
- def __str__(self):
- return self.msg
-
-class DataSmart(MutableMapping):
- def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
- self.dict = {}
-
- # cookie monster tribute
- self._special_values = special
- self._seen_overrides = seen
-
- self.expand_cache = {}
-
- def expandWithRefs(self, s, varname):
-
- if not isinstance(s, basestring): # sanity check
- return VariableParse(varname, self, s)
-
- if varname and varname in self.expand_cache:
- return self.expand_cache[varname]
-
- varparse = VariableParse(varname, self)
-
- while s.find('${') != -1:
- olds = s
- try:
- s = __expand_var_regexp__.sub(varparse.var_sub, s)
- s = __expand_python_regexp__.sub(varparse.python_sub, s)
- if s == olds:
- break
- except ExpansionError:
- raise
- except Exception as exc:
- raise ExpansionError(varname, s, exc)
-
- varparse.value = s
-
- if varname:
- self.expand_cache[varname] = varparse
-
- return varparse
-
- def expand(self, s, varname):
- return self.expandWithRefs(s, varname).value
-
-
- def finalize(self):
- """Performs final steps upon the datastore, including application of overrides"""
-
- overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
-
- #
- # Well, let us see what breaks here. We used to iterate
- # over each variable, apply the override and then
- # do the line expanding.
- # If we have bad luck - which we will have - the keys
- # were in some order that mattered for this
- # method and which we don't have anymore.
- # Anyway, we will fix that and write test cases this
- # time.
-
- #
- # First we apply all overrides
- # Then we will handle _append and _prepend
- #
-
- for o in overrides:
- # calculate '_'+override
- l = len(o) + 1
-
- # see if one should even try
- if o not in self._seen_overrides:
- continue
-
- vars = self._seen_overrides[o]
- for var in vars:
- name = var[:-l]
- try:
- self.setVar(name, self.getVar(var, False))
- except Exception:
- logger.info("Untracked delVar")
-
- # now on to the appends and prepends
- for op in __setvar_keyword__:
- if op in self._special_values:
- appends = self._special_values[op] or []
- for append in appends:
- keep = []
- for (a, o) in self.getVarFlag(append, op) or []:
- if o and o not in overrides:
- keep.append((a, o))
- continue
-
- if op == "_append":
- sval = self.getVar(append, False) or ""
- sval += a
- self.setVar(append, sval)
- elif op == "_prepend":
- sval = a + (self.getVar(append, False) or "")
- self.setVar(append, sval)
-
- # We save overrides that may be applied at some later stage
- if keep:
- self.setVarFlag(append, op, keep)
- else:
- self.delVarFlag(append, op)
-
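A much-simplified standalone sketch of the override pass: a FOO_arm value replaces FOO whenever 'arm' is listed in OVERRIDES (the real finalize above also honours override order and the _append/_prepend keywords):

    def apply_overrides(store, overrides):
        for o in overrides:
            suffix = '_' + o
            for name in list(store):
                if name.endswith(suffix):
                    # FOO_arm wins over plain FOO
                    store[name[:-len(suffix)]] = store[name]

    store = {'CFLAGS': '-O2', 'CFLAGS_arm': '-O2 -mthumb'}
    apply_overrides(store, ['arm'])
    print(store['CFLAGS'])   # -O2 -mthumb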
- def initVar(self, var):
- self.expand_cache = {}
- if not var in self.dict:
- self.dict[var] = {}
-
- def _findVar(self, var):
- dest = self.dict
- while dest:
- if var in dest:
- return dest[var]
-
- if "_data" not in dest:
- break
- dest = dest["_data"]
-
- def _makeShadowCopy(self, var):
- if var in self.dict:
- return
-
- local_var = self._findVar(var)
-
- if local_var:
- self.dict[var] = copy.copy(local_var)
- else:
- self.initVar(var)
-
- def setVar(self, var, value):
- self.expand_cache = {}
- match = __setvar_regexp__.match(var)
- if match and match.group("keyword") in __setvar_keyword__:
- base = match.group('base')
- keyword = match.group("keyword")
- override = match.group('add')
- l = self.getVarFlag(base, keyword) or []
- l.append([value, override])
- self.setVarFlag(base, keyword, l)
-
- # todo make sure keyword is not __doc__ or __module__
- # pay the cookie monster
- try:
- self._special_values[keyword].add( base )
- except KeyError:
- self._special_values[keyword] = set()
- self._special_values[keyword].add( base )
-
- return
-
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- # more cookies for the cookie monster
- if '_' in var:
- override = var[var.rfind('_')+1:]
- if override not in self._seen_overrides:
- self._seen_overrides[override] = set()
- self._seen_overrides[override].add( var )
-
- # setting var
- self.dict[var]["content"] = value
-
- def getVar(self, var, exp):
- value = self.getVarFlag(var, "content")
-
- if exp and value:
- return self.expand(value, var)
- return value
-
- def renameVar(self, key, newkey):
- """
- Rename the variable key to newkey
- """
- val = self.getVar(key, 0)
- if val is not None:
- self.setVar(newkey, val)
-
- for i in ('_append', '_prepend'):
- src = self.getVarFlag(key, i)
- if src is None:
- continue
-
- dest = self.getVarFlag(newkey, i) or []
- dest.extend(src)
- self.setVarFlag(newkey, i, dest)
-
- if i in self._special_values and key in self._special_values[i]:
- self._special_values[i].remove(key)
- self._special_values[i].add(newkey)
-
- self.delVar(key)
-
- def delVar(self, var):
- self.expand_cache = {}
- self.dict[var] = {}
-
- def setVarFlag(self, var, flag, flagvalue):
- if not var in self.dict:
- self._makeShadowCopy(var)
- self.dict[var][flag] = flagvalue
-
- def getVarFlag(self, var, flag, expand=False):
- local_var = self._findVar(var)
- value = None
- if local_var:
- if flag in local_var:
- value = copy.copy(local_var[flag])
- elif flag == "content" and "defaultval" in local_var:
- value = copy.copy(local_var["defaultval"])
- if expand and value:
- value = self.expand(value, None)
- return value
-
- def delVarFlag(self, var, flag):
- local_var = self._findVar(var)
- if not local_var:
- return
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- if var in self.dict and flag in self.dict[var]:
- del self.dict[var][flag]
-
- def setVarFlags(self, var, flags):
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- for i in flags:
- if i == "content":
- continue
- self.dict[var][i] = flags[i]
-
- def getVarFlags(self, var):
- local_var = self._findVar(var)
- flags = {}
-
- if local_var:
- for i in local_var:
- if i == "content":
- continue
- flags[i] = local_var[i]
-
- if len(flags) == 0:
- return None
- return flags
-
-
- def delVarFlags(self, var):
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- if var in self.dict:
- content = None
-
- # try to save the content
- if "content" in self.dict[var]:
- content = self.dict[var]["content"]
- self.dict[var] = {}
- self.dict[var]["content"] = content
- else:
- del self.dict[var]
-
-
- def createCopy(self):
- """
- Create a copy of self by setting _data to self
- """
- # we really want this to be a DataSmart...
- data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
- data.dict["_data"] = self.dict
-
- return data
-
- def expandVarref(self, variable, parents=False):
- """Find all references to variable in the data and expand it
- in place, optionally descending to parent datastores."""
-
- if parents:
- keys = iter(self)
- else:
- keys = self.localkeys()
-
- ref = '${%s}' % variable
- value = self.getVar(variable, False)
- for key in keys:
- referrervalue = self.getVar(key, False)
- if referrervalue and ref in referrervalue:
- self.setVar(key, referrervalue.replace(ref, value))
-
- def localkeys(self):
- for key in self.dict:
- if key != '_data':
- yield key
-
- def __iter__(self):
- seen = set()
- def _keys(d):
- if "_data" in d:
- for key in _keys(d["_data"]):
- yield key
-
- for key in d:
- if key != "_data":
- if not key in seen:
- seen.add(key)
- yield key
- return _keys(self.dict)
-
- def __len__(self):
- return len(frozenset(self))
-
- def __getitem__(self, item):
- value = self.getVar(item, False)
- if value is None:
- raise KeyError(item)
- else:
- return value
-
- def __setitem__(self, var, value):
- self.setVar(var, value)
-
- def __delitem__(self, var):
- self.delVar(var)
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
deleted file mode 100644
index 3467ddd613..0000000000
--- a/bitbake/lib/bb/event.py
+++ /dev/null
@@ -1,386 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Event' implementation
-
-Classes and functions for manipulating 'events' in the
-BitBake build tools.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os, sys
-import warnings
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-import logging
-import atexit
-import bb.utils
-
-# This is the pid for which we should generate the event. This is set when
-# the runqueue forks off.
-worker_pid = 0
-worker_pipe = None
-
-class Event(object):
- """Base class for events"""
-
- def __init__(self):
- self.pid = worker_pid
-
-NotHandled = 0
-Handled = 1
-
-Registered = 10
-AlreadyRegistered = 14
-
-# Internal
-_handlers = {}
-_ui_handlers = {}
-_ui_handler_seq = 0
-
-# For compatibility
-bb.utils._context["NotHandled"] = NotHandled
-bb.utils._context["Handled"] = Handled
-
-def fire_class_handlers(event, d):
- if isinstance(event, logging.LogRecord):
- return
-
- for handler in _handlers:
- h = _handlers[handler]
- event.data = d
- if type(h).__name__ == "code":
- locals = {"e": event}
- bb.utils.simple_exec(h, locals)
- ret = bb.utils.better_eval("tmpHandler(e)", locals)
- if ret is not None:
- warnings.warn("Using Handled/NotHandled in event handlers is deprecated",
- DeprecationWarning, stacklevel = 2)
- else:
- h(event)
- del event.data
-
-ui_queue = []
-@atexit.register
-def print_ui_queue():
- """If we're exiting before a UI has been spawned, display any queued
- LogRecords to the console."""
- logger = logging.getLogger("BitBake")
- if not _ui_handlers:
- from bb.msg import BBLogFormatter
- console = logging.StreamHandler(sys.stdout)
- console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
- logger.handlers = [console]
- while ui_queue:
- event = ui_queue.pop()
- if isinstance(event, logging.LogRecord):
- logger.handle(event)
-
-def fire_ui_handlers(event, d):
- if not _ui_handlers:
- # No UI handlers registered yet, queue up the messages
- ui_queue.append(event)
- return
-
- errors = []
- for h in _ui_handlers:
- #print "Sending event %s" % event
- try:
- # We use pickle here since it better handles object instances
- # which xmlrpc's marshaller does not. Events *must* be serializable
- # by pickle.
- _ui_handlers[h].event.send((pickle.dumps(event)))
- except:
- errors.append(h)
- for h in errors:
- del _ui_handlers[h]
-
-def fire(event, d):
- """Fire off an Event"""
-
- # We can fire class handlers in the worker process context and this is
- # desired so they get the task based datastore.
- # UI handlers need to be fired in the server context so we defer this. They
- # don't have a datastore so the datastore context isn't a problem.
-
- fire_class_handlers(event, d)
- if worker_pid != 0:
- worker_fire(event, d)
- else:
- fire_ui_handlers(event, d)
-
-def worker_fire(event, d):
- data = "<event>" + pickle.dumps(event) + "</event>"
- worker_pipe.write(data)
-
-def fire_from_worker(event, d):
- if not event.startswith("<event>") or not event.endswith("</event>"):
- print("Error, not an event %s" % event)
- return
- event = pickle.loads(event[7:-8])
- fire_ui_handlers(event, d)
-
-def register(name, handler):
- """Register an Event handler"""
-
- # already registered
- if name in _handlers:
- return AlreadyRegistered
-
- if handler is not None:
- # handle string containing python code
- if isinstance(handler, basestring):
- tmp = "def tmpHandler(e):\n%s" % handler
- comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._registerCode")
- _handlers[name] = comp
- else:
- _handlers[name] = handler
-
- return Registered
-
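register() accepts either a callable or a string of already-indented Python source, which is compiled into a generated tmpHandler function. A standalone sketch of the string path, using plain exec in place of the bb.utils wrappers:

    handlers = {}

    def register_handler(name, handler):
        if isinstance(handler, str):
            # The caller supplies the (indented) function body.
            src = "def tmpHandler(e):\n%s" % handler
            scope = {}
            exec(src, scope)
            handlers[name] = scope['tmpHandler']
        else:
            handlers[name] = handler

    register_handler('echo', "    print('saw event: %r' % e)")
    handlers['echo']('ConfigParsed')   # saw event: 'ConfigParsed'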
-def remove(name, handler):
- """Remove an Event handler"""
- _handlers.pop(name)
-
-def register_UIHhandler(handler):
- bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
- _ui_handlers[_ui_handler_seq] = handler
- return _ui_handler_seq
-
-def unregister_UIHhandler(handlerNum):
- if handlerNum in _ui_handlers:
- del _ui_handlers[handlerNum]
- return
-
-def getName(e):
- """Returns the name of a class or class instance"""
- if getattr(e, "__name__", None) == None:
- return e.__class__.__name__
- else:
- return e.__name__
-
-class ConfigParsed(Event):
- """Configuration Parsing Complete"""
-
-class RecipeParsed(Event):
- """ Recipe Parsing Complete """
-
- def __init__(self, fn):
- self.fn = fn
- Event.__init__(self)
-
-class StampUpdate(Event):
- """Trigger for any adjustment of the stamp files to happen"""
-
- def __init__(self, targets, stampfns):
- self._targets = targets
- self._stampfns = stampfns
- Event.__init__(self)
-
- def getStampPrefix(self):
- return self._stampfns
-
- def getTargets(self):
- return self._targets
-
- stampPrefix = property(getStampPrefix)
- targets = property(getTargets)
-
-class BuildBase(Event):
- """Base class for bbmake run events"""
-
- def __init__(self, n, p, failures = 0):
- self._name = n
- self._pkgs = p
- Event.__init__(self)
- self._failures = failures
-
- def getPkgs(self):
- return self._pkgs
-
- def setPkgs(self, pkgs):
- self._pkgs = pkgs
-
- def getName(self):
- return self._name
-
- def setName(self, name):
- self._name = name
-
- def getCfg(self):
- return self.data
-
- def setCfg(self, cfg):
- self.data = cfg
-
- def getFailures(self):
- """
- Return the number of failed packages
- """
- return self._failures
-
- pkgs = property(getPkgs, setPkgs, None, "pkgs property")
- name = property(getName, setName, None, "name property")
- cfg = property(getCfg, setCfg, None, "cfg property")
-
-
-
-
-
-class BuildStarted(BuildBase):
- """bbmake build run started"""
-
-
-class BuildCompleted(BuildBase):
- """bbmake build run completed"""
-
-
-
-
-class NoProvider(Event):
- """No Provider for an Event"""
-
- def __init__(self, item, runtime=False, dependees=None):
- Event.__init__(self)
- self._item = item
- self._runtime = runtime
- self._dependees = dependees
-
- def getItem(self):
- return self._item
-
- def isRuntime(self):
- return self._runtime
-
-class MultipleProviders(Event):
- """Multiple Providers"""
-
- def __init__(self, item, candidates, runtime = False):
- Event.__init__(self)
- self._item = item
- self._candidates = candidates
- self._is_runtime = runtime
-
- def isRuntime(self):
- """
- Is this a runtime issue?
- """
- return self._is_runtime
-
- def getItem(self):
- """
- The name of the item to be built
- """
- return self._item
-
- def getCandidates(self):
- """
- Get the possible Candidates for a PROVIDER.
- """
- return self._candidates
-
-class ParseStarted(Event):
- """Recipe parsing for the runqueue has begun"""
- def __init__(self, total):
- Event.__init__(self)
- self.total = total
-
-class ParseCompleted(Event):
- """Recipe parsing for the runqueue has completed"""
-
- def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
- Event.__init__(self)
- self.cached = cached
- self.parsed = parsed
- self.skipped = skipped
- self.virtuals = virtuals
- self.masked = masked
- self.errors = errors
- self.sofar = cached + parsed
- self.total = total
-
-class ParseProgress(Event):
- """Recipe parsing progress"""
-
- def __init__(self, current):
- Event.__init__(self)
- self.current = current
-
-class CacheLoadStarted(Event):
- """Loading of the dependency cache has begun"""
- def __init__(self, total):
- Event.__init__(self)
- self.total = total
-
-class CacheLoadProgress(Event):
- """Cache loading progress"""
- def __init__(self, current):
- Event.__init__(self)
- self.current = current
-
-class CacheLoadCompleted(Event):
- """Cache loading is complete"""
- def __init__(self, total, num_entries):
- Event.__init__(self)
- self.total = total
- self.num_entries = num_entries
-
-
-class DepTreeGenerated(Event):
- """
- Event when a dependency tree has been generated
- """
-
- def __init__(self, depgraph):
- Event.__init__(self)
- self._depgraph = depgraph
-
-class MsgBase(Event):
- """Base class for messages"""
-
- def __init__(self, msg):
- self._message = msg
- Event.__init__(self)
-
-class MsgDebug(MsgBase):
- """Debug Message"""
-
-class MsgNote(MsgBase):
- """Note Message"""
-
-class MsgWarn(MsgBase):
- """Warning Message"""
-
-class MsgError(MsgBase):
- """Error Message"""
-
-class MsgFatal(MsgBase):
- """Fatal Message"""
-
-class MsgPlain(MsgBase):
- """General output"""
-
-class LogHandler(logging.Handler):
- """Dispatch logging messages as bitbake events"""
-
- def emit(self, record):
- fire(record, None)
-
- def filter(self, record):
- record.taskpid = worker_pid
- return True
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
deleted file mode 100644
index 2f92d87d96..0000000000
--- a/bitbake/lib/bb/fetch/__init__.py
+++ /dev/null
@@ -1,836 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from __future__ import absolute_import
-from __future__ import print_function
-import os, re
-import logging
-import bb
-from bb import data
-from bb import persist_data
-from bb import utils
-
-__version__ = "1"
-
-logger = logging.getLogger("BitBake.Fetch")
-
-class MalformedUrl(Exception):
- """Exception raised when encountering an invalid url"""
-
-class FetchError(Exception):
- """Exception raised when a download fails"""
-
-class NoMethodError(Exception):
- """Exception raised when there is no method to obtain a supplied url or set of urls"""
-
-class MissingParameterError(Exception):
- """Exception raised when a fetch method is missing a critical parameter in the url"""
-
-class ParameterError(Exception):
- """Exception raised when a url cannot be proccessed due to invalid parameters."""
-
-class MD5SumError(Exception):
- """Exception raised when a MD5SUM of a file does not match the expected one"""
-
-class InvalidSRCREV(Exception):
- """Exception raised when an invalid SRCREV is encountered"""
-
-def decodeurl(url):
- """Decodes an URL into the tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
- if not m:
- raise MalformedUrl(url)
-
- type = m.group('type')
- location = m.group('location')
- if not location:
- raise MalformedUrl(url)
- user = m.group('user')
- parm = m.group('parm')
-
- locidx = location.find('/')
- if locidx != -1 and type.lower() != 'file':
- host = location[:locidx]
- path = location[locidx:]
- else:
- host = ""
- path = location
- if user:
- m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
- if m:
- user = m.group('user')
- pswd = m.group('pswd')
- else:
- user = ''
- pswd = ''
-
- p = {}
- if parm:
- for s in parm.split(';'):
- s1, s2 = s.split('=')
- p[s1] = s2
-
- return (type, host, path, user, pswd, p)
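-
-# Example of the decoding (hypothetical URL, shown for illustration only):
-#
-#   decodeurl("cvs://anonymous@cvs.example.org/cvsroot;module=foo;tag=BAR")
-#   returns ('cvs', 'cvs.example.org', '/cvsroot', 'anonymous', '',
-#   {'module': 'foo', 'tag': 'BAR'})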
-
-def encodeurl(decoded):
- """Encodes a URL from tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- (type, host, path, user, pswd, p) = decoded
-
- if not type or not path:
- raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)
- url = '%s://' % type
- if user:
- url += "%s" % user
- if pswd:
- url += ":%s" % pswd
- url += "@"
- if host:
- url += "%s" % host
- url += "%s" % path
- if p:
- for parm in p:
- url += ";%s=%s" % (parm, p[parm])
-
- return url
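-
-# encodeurl() is the inverse of decodeurl(); continuing the hypothetical
-# example above:
-#
-#   encodeurl(('cvs', 'cvs.example.org', '/cvsroot', 'anonymous', '',
-#   {'module': 'foo'}))
-#   returns "cvs://anonymous@cvs.example.org/cvsroot;module=foo"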
-
-def uri_replace(uri, uri_find, uri_replace, d):
- if not uri or not uri_find or not uri_replace:
- logger.debug(1, "uri_replace: passed an undefined value, not replacing")
- return uri
- uri_decoded = list(decodeurl(uri))
- uri_find_decoded = list(decodeurl(uri_find))
- uri_replace_decoded = list(decodeurl(uri_replace))
- result_decoded = ['', '', '', '', '', {}]
- for i in uri_find_decoded:
- loc = uri_find_decoded.index(i)
- result_decoded[loc] = uri_decoded[loc]
- if isinstance(i, basestring):
- if (re.match(i, uri_decoded[loc])):
- result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
- if uri_find_decoded.index(i) == 2:
- if d:
- localfn = bb.fetch.localpath(uri, d)
- if localfn:
- result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d)))
- else:
- return uri
- return encodeurl(result_decoded)
-
-methods = []
-urldata_cache = {}
-saved_headrevs = {}
-
-def fetcher_init(d):
- """
- Called to initialize the fetchers once the configuration data is known.
- Calls before this must not hit the cache.
- """
- pd = persist_data.persist(d)
- # When to drop SCM head revisions controlled by user policy
- srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
- if srcrev_policy == "cache":
- logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
- elif srcrev_policy == "clear":
- logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
- try:
- bb.fetch.saved_headrevs = pd['BB_URI_HEADREVS'].items()
- except:
- pass
- del pd['BB_URI_HEADREVS']
- else:
- raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
-
- for m in methods:
- if hasattr(m, "init"):
- m.init(d)
-
-def fetcher_compare_revisions(d):
- """
- Compare the revisions in the persistent cache with the current values and
- return true/false depending on whether they've changed.
- """
-
- pd = persist_data.persist(d)
- data = pd['BB_URI_HEADREVS'].items()
- data2 = bb.fetch.saved_headrevs
-
- for key in data:
- if key not in data2 or data2[key] != data[key]:
- logger.debug(1, "%s changed", key)
- return True
- else:
- logger.debug(2, "%s did not change", key)
- return False
-
-# Function call order is usually:
-# 1. init
-# 2. go
-# 3. localpaths
-# localpath can be called at any time
-
-def init(urls, d, setup = True):
- urldata = {}
-
- fn = bb.data.getVar('FILE', d, 1)
- if fn in urldata_cache:
- urldata = urldata_cache[fn]
-
- for url in urls:
- if url not in urldata:
- urldata[url] = FetchData(url, d)
-
- if setup:
- for url in urldata:
- if not urldata[url].setup:
- urldata[url].setup_localpath(d)
-
- urldata_cache[fn] = urldata
- return urldata
-
-def mirror_from_string(data):
- return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
-
-def verify_checksum(u, ud, d):
- """
- verify the MD5 and SHA256 checksum for downloaded src
-
- return value:
- - True: checksum matched
- - False: checksum unmatched
-
- if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value.
- if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
- matched
- """
-
- if not ud.type in ["http", "https", "ftp", "ftps"]:
- return
-
- md5data = bb.utils.md5_file(ud.localpath)
- sha256data = bb.utils.sha256_file(ud.localpath)
-
- if ud.md5_expected is None or ud.sha256_expected is None:
- logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
- ud.localpath, ud.md5_name, md5data,
- ud.sha256_name, sha256data)
- if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
- raise FetchError("No checksum specified for %s." % u)
- return
-
- if (ud.md5_expected != md5data or ud.sha256_expected != sha256data):
- logger.error('The checksums for "%s" did not match.\n'
- ' MD5: expected "%s", got "%s"\n'
- ' SHA256: expected "%s", got "%s"\n',
- ud.localpath, ud.md5_expected, md5data,
- ud.sha256_expected, sha256data)
- raise FetchError("%s checksum mismatch." % u)
-
-def go(d, urls = None):
- """
- Fetch all urls
- init must have previously been called
- """
- if not urls:
- urls = d.getVar("SRC_URI", 1).split()
- urldata = init(urls, d, True)
-
- for u in urls:
- ud = urldata[u]
- m = ud.method
- localpath = ""
-
- if not ud.localfile:
- continue
-
- lf = bb.utils.lockfile(ud.lockfile)
-
- if m.try_premirror(u, ud, d):
- # First try fetching uri, u, from PREMIRRORS
- mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
- localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
- elif os.path.exists(ud.localfile):
- localpath = ud.localfile
-
- # Need to re-test forcefetch() which will return true if our copy is too old
- if m.forcefetch(u, ud, d) or not localpath:
- # Next try fetching from the original uri, u
- try:
- m.go(u, ud, d)
- localpath = ud.localpath
- except FetchError:
- # Remove any incomplete file
- bb.utils.remove(ud.localpath)
- # Finally, try fetching uri, u, from MIRRORS
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
- localpath = try_mirrors(d, u, mirrors)
- if not localpath or not os.path.exists(localpath):
- raise FetchError("Unable to fetch URL %s from any source." % u)
-
- ud.localpath = localpath
-
- if os.path.exists(ud.md5):
- # Touch the md5 file to show active use of the download
- try:
- os.utime(ud.md5, None)
- except OSError:
- # Errors aren't fatal here
- pass
- else:
- # Only check the checksums if we've not seen this item before
- verify_checksum(u, ud, d)
- Fetch.write_md5sum(u, ud, d)
-
- bb.utils.unlockfile(lf)
-
-def checkstatus(d, urls = None):
- """
- Check all urls exist upstream
- init must have previously been called
- """
- urldata = init([], d, True)
-
- if not urls:
- urls = urldata
-
- for u in urls:
- ud = urldata[u]
- m = ud.method
- logger.debug(1, "Testing URL %s", u)
- # First try checking uri, u, from PREMIRRORS
- mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
- ret = try_mirrors(d, u, mirrors, True)
- if not ret:
- # Next try checking from the original uri, u
- try:
- ret = m.checkstatus(u, ud, d)
- except:
- # Finally, try checking uri, u, from MIRRORS
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
- ret = try_mirrors(d, u, mirrors, True)
-
- if not ret:
- raise FetchError("URL %s doesn't work" % u)
-
-def localpaths(d):
- """
- Return a list of the local filenames, assuming successful fetch
- """
- local = []
- urldata = init([], d, True)
-
- for u in urldata:
- ud = urldata[u]
- local.append(ud.localpath)
-
- return local
-
-srcrev_internal_call = False
-
-def get_autorev(d):
- return get_srcrev(d)
-
-def get_srcrev(d):
- """
- Return the version string for the current package
- (usually to be used as PV)
- Most packages have only one SCM, in which case we just pass on the call.
- In the multi SCM case, we build a value based on SRCREV_FORMAT which must
- have been set.
- """
-
- #
- # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
- # could translate into a call to here. If it does, we need to catch this
- # and provide some way so it knows get_srcrev is active instead of being
- # some number etc. hence the srcrev_internal_call tracking and the magic
- # "SRCREVINACTION" return value.
- #
- # Neater solutions welcome!
- #
- if bb.fetch.srcrev_internal_call:
- return "SRCREVINACTION"
-
- scms = []
-
- # Only call setup_localpath on URIs which supports_srcrev()
- urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
- for u in urldata:
- ud = urldata[u]
- if ud.method.supports_srcrev():
- if not ud.setup:
- ud.setup_localpath(d)
- scms.append(u)
-
- if len(scms) == 0:
- logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
- raise ParameterError
-
- if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
- bb.data.setVar('__BB_DONT_CACHE', '1', d)
-
- if len(scms) == 1:
- return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
-
- #
- # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
- #
- format = bb.data.getVar('SRCREV_FORMAT', d, 1)
- if not format:
- logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
- raise ParameterError
-
- for scm in scms:
- if 'name' in urldata[scm].parm:
- name = urldata[scm].parm["name"]
- rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
- format = format.replace(name, rev)
-
- return format
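-
-# A hypothetical multiple-SCM example: with
-#   SRC_URI = "git://host/a;name=machine git://host/b;name=meta"
-#   SRCREV_FORMAT = "machine_meta"
-# each name is replaced by the sortable revision of that SCM, yielding
-# something like "10+abcd_12+ef01".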
-
-def localpath(url, d, cache = True):
- """
- Called from the parser with cache=False since the cache isn't ready
- at this point. Also called from classes in OE, e.g. patch.bbclass
- """
- ud = init([url], d)
- if ud[url].method:
- return ud[url].localpath
- return url
-
-def runfetchcmd(cmd, d, quiet = False):
- """
- Run cmd returning the command output
- Raise an error if interrupted or cmd fails
- Optionally echo command output to stdout
- """
-
- # Need to export PATH as binary could be in metadata paths
- # rather than host provided
- # Also include some other variables.
- # FIXME: Should really include all exported variables?
- exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
- 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
- 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
- 'KRB5CCNAME', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']
-
- for var in exportvars:
- val = data.getVar(var, d, True)
- if val:
- cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
-
- logger.debug(1, "Running %s", cmd)
-
- # redirect stderr to stdout
- stdout_handle = os.popen(cmd + " 2>&1", "r")
- output = ""
-
- while True:
- line = stdout_handle.readline()
- if not line:
- break
- if not quiet:
- print(line, end=' ')
- output += line
-
- status = stdout_handle.close() or 0
- # os.popen().close() returns a wait()-style status: the low byte is the
- # signal number and the high byte is the exit status
- signal = status & 0xff
- exitstatus = status >> 8
-
- if signal:
- raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
- elif exitstatus != 0:
- raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))
-
- return output
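-
-# Typical use from a fetcher implementation (illustrative only; repourl is a
-# placeholder):
-#
-#   output = runfetchcmd("git ls-remote %s" % repourl, d, quiet=True)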
-
-def try_mirrors(d, uri, mirrors, check = False, force = False):
- """
- Try to use a mirrored version of the sources.
- This method will be automatically called before the fetchers go.
-
- d is a bb.data instance
- uri is the original uri we're trying to download
- mirrors is the list of mirrors we're going to try
- """
- fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
- if not check and os.access(fpath, os.R_OK) and not force:
- logger.debug(1, "%s already exists, skipping checkout.", fpath)
- return fpath
-
- ld = d.createCopy()
- for (find, replace) in mirrors:
- newuri = uri_replace(uri, find, replace, ld)
- if newuri != uri:
- try:
- ud = FetchData(newuri, ld)
- except bb.fetch.NoMethodError:
- logger.debug(1, "No method for %s", uri)
- continue
-
- ud.setup_localpath(ld)
-
- try:
- if check:
- found = ud.method.checkstatus(newuri, ud, ld)
- if found:
- return found
- else:
- ud.method.go(newuri, ud, ld)
- return ud.localpath
- except (bb.fetch.MissingParameterError,
- bb.fetch.FetchError,
- bb.fetch.MD5SumError):
- import sys
- (type, value, traceback) = sys.exc_info()
- logger.debug(2, "Mirror fetch failure: %s", value)
- bb.utils.remove(ud.localpath)
- continue
- return None
-
-
-class FetchData(object):
- """
- A class which represents the fetcher state for a given URI.
- """
- def __init__(self, url, d):
- self.localfile = ""
- (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
- self.date = Fetch.getSRCDate(self, d)
- self.url = url
- if not self.user and "user" in self.parm:
- self.user = self.parm["user"]
- if not self.pswd and "pswd" in self.parm:
- self.pswd = self.parm["pswd"]
- self.setup = False
-
- if "name" in self.parm:
- self.md5_name = "%s.md5sum" % self.parm["name"]
- self.sha256_name = "%s.sha256sum" % self.parm["name"]
- else:
- self.md5_name = "md5sum"
- self.sha256_name = "sha256sum"
- self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
- self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
-
- for m in methods:
- if m.supports(url, self, d):
- self.method = m
- return
- raise NoMethodError("Missing implementation for url %s" % url)
-
- def setup_localpath(self, d):
- self.setup = True
- if "localpath" in self.parm:
- # if user sets localpath for file, use it instead.
- self.localpath = self.parm["localpath"]
- self.basename = os.path.basename(self.localpath)
- else:
- premirrors = bb.data.getVar('PREMIRRORS', d, True)
- local = ""
- if premirrors and self.url:
- aurl = self.url.split(";")[0]
- mirrors = mirror_from_string(premirrors)
- for (find, replace) in mirrors:
- if replace.startswith("file://"):
- path = aurl.split("://")[1]
- path = path.split(";")[0]
- local = replace.split("://")[1] + os.path.basename(path)
- if local == aurl or not os.path.exists(local) or os.path.isdir(local):
- local = ""
- self.localpath = local
- if not local:
- try:
- bb.fetch.srcrev_internal_call = True
- self.localpath = self.method.localpath(self.url, self, d)
- finally:
- bb.fetch.srcrev_internal_call = False
- # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
- # Horrible...
- bb.data.delVar("ISHOULDNEVEREXIST", d)
-
- if self.localpath is not None:
- # Note: These files should always be in DL_DIR whereas localpath may not be.
- basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
- self.md5 = basepath + '.md5'
- self.lockfile = basepath + '.lock'
-
-
-class Fetch(object):
- """Base class for 'fetch'ing data"""
-
- def __init__(self, urls = None):
- self.urls = urls or []
-
- def supports(self, url, urldata, d):
- """
- Check to see if this fetch class supports a given url.
- """
- return 0
-
- def localpath(self, url, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- Can also setup variables in urldata for use in go (saving code duplication
- and duplicate code execution)
- """
- return url
-
- def _strip_leading_slashes(self, relpath):
- """
- Remove leading slash as os.path.join can't cope
- """
- while os.path.isabs(relpath):
- relpath = relpath[1:]
- return relpath
-
- def setUrls(self, urls):
- self.__urls = urls
-
- def getUrls(self):
- return self.__urls
-
- urls = property(getUrls, setUrls, None, "Urls property")
-
- def forcefetch(self, url, urldata, d):
- """
- Force a fetch, even if localpath exists?
- """
- return False
-
- def supports_srcrev(self):
- """
- The fetcher supports auto source revisions (SRCREV)
- """
- return False
-
- def go(self, url, urldata, d):
- """
- Fetch urls
- Assumes localpath was called first
- """
- raise NoMethodError("Missing implementation for url")
-
- def try_premirror(self, url, urldata, d):
- """
- Should premirrors be used?
- """
- if urldata.method.forcefetch(url, urldata, d):
- return True
- elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
- return False
- else:
- return True
-
- def checkstatus(self, url, urldata, d):
- """
- Check the status of a URL
- Assumes localpath was called first
- """
- logger.info("URL %s could not be checked for status since no method exists.", url)
- return True
-
- def getSRCDate(urldata, d):
- """
- Return the SRC Date for the component
-
- d is the bb.data instance
- """
- if "srcdate" in urldata.parm:
- return urldata.parm['srcdate']
-
- pn = data.getVar("PN", d, 1)
-
- if pn:
- return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
-
- return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
- getSRCDate = staticmethod(getSRCDate)
-
- def srcrev_internal_helper(ud, d):
- """
- Return:
- a) a source revision if specified
- b) True if auto srcrev is in action
- c) False otherwise
- """
-
- if 'rev' in ud.parm:
- return ud.parm['rev']
-
- if 'tag' in ud.parm:
- return ud.parm['tag']
-
- rev = None
- if 'name' in ud.parm:
- pn = data.getVar("PN", d, 1)
- rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
- if not rev:
- rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
- if not rev:
- rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
- if not rev:
- rev = data.getVar("SRCREV", d, 1)
- if rev == "INVALID":
- raise InvalidSRCREV("Please set SRCREV to a valid value")
- if not rev:
- return False
- if rev == "SRCREVINACTION":
- return True
- return rev
-
- srcrev_internal_helper = staticmethod(srcrev_internal_helper)
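-
- # A hypothetical illustration of the precedence above: in a recipe named
- # linux-foo, SRCREV_meta_pn-linux-foo = "abc123" pins the SRC_URI entry
- # with name=meta and wins over a plain SRCREV setting.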
-
- def localcount_internal_helper(ud, d):
- """
- Return:
- a) a locked localcount if specified
- b) None otherwise
- """
-
- localcount = None
- if 'name' in ud.parm:
- pn = data.getVar("PN", d, 1)
- localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
- if not localcount:
- localcount = data.getVar("LOCALCOUNT", d, 1)
- return localcount
-
- localcount_internal_helper = staticmethod(localcount_internal_helper)
-
- def verify_md5sum(ud, got_sum):
- """
- Verify the md5sum we wanted with the one we got
- """
- wanted_sum = ud.parm.get('md5sum')
- if not wanted_sum:
- return True
-
- return wanted_sum == got_sum
- verify_md5sum = staticmethod(verify_md5sum)
-
- def write_md5sum(url, ud, d):
- md5data = bb.utils.md5_file(ud.localpath)
- # verify the md5sum
- if not Fetch.verify_md5sum(ud, md5data):
- raise MD5SumError(url)
-
- md5out = open(ud.md5, 'w')
- md5out.write(md5data)
- md5out.close()
- write_md5sum = staticmethod(write_md5sum)
-
- def latest_revision(self, url, ud, d):
- """
- Look in the cache for the latest revision, if not present ask the SCM.
- """
- if not hasattr(self, "_latest_revision"):
- raise ParameterError
-
- pd = persist_data.persist(d)
- revs = pd['BB_URI_HEADREVS']
- key = self.generate_revision_key(url, ud, d)
- rev = revs[key]
- if rev is not None:
- return str(rev)
-
- revs[key] = rev = self._latest_revision(url, ud, d)
- return rev
-
- def sortable_revision(self, url, ud, d):
- """
-
- """
- if hasattr(self, "_sortable_revision"):
- return self._sortable_revision(url, ud, d)
-
- pd = persist_data.persist(d)
- localcounts = pd['BB_URI_LOCALCOUNT']
- key = self.generate_revision_key(url, ud, d)
-
- latest_rev = self._build_revision(url, ud, d)
- last_rev = localcounts[key + '_rev']
- uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
- count = None
- if uselocalcount:
- count = Fetch.localcount_internal_helper(ud, d)
- if count is None:
- count = localcounts[key + '_count']
-
- if last_rev == latest_rev:
- return str(count + "+" + latest_rev)
-
- buildindex_provided = hasattr(self, "_sortable_buildindex")
- if buildindex_provided:
- count = self._sortable_buildindex(url, ud, d, latest_rev)
-
- if count is None:
- count = "0"
- elif uselocalcount or buildindex_provided:
- count = str(count)
- else:
- count = str(int(count) + 1)
-
- localcounts[key + '_rev'] = latest_rev
- localcounts[key + '_count'] = count
-
- return str(count + "+" + latest_rev)
-
- def generate_revision_key(self, url, ud, d):
- key = self._revision_key(url, ud, d)
- return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
-
-from . import cvs
-from . import git
-from . import local
-from . import svn
-from . import wget
-from . import svk
-from . import ssh
-from . import perforce
-from . import bzr
-from . import hg
-from . import osc
-from . import repo
-
-methods.append(local.Local())
-methods.append(wget.Wget())
-methods.append(svn.Svn())
-methods.append(git.Git())
-methods.append(cvs.Cvs())
-methods.append(svk.Svk())
-methods.append(ssh.SSH())
-methods.append(perforce.Perforce())
-methods.append(bzr.Bzr())
-methods.append(hg.Hg())
-methods.append(osc.Osc())
-methods.append(repo.Repo())
diff --git a/bitbake/lib/bb/fetch/bzr.py b/bitbake/lib/bb/fetch/bzr.py
deleted file mode 100644
index afaf799900..0000000000
--- a/bitbake/lib/bb/fetch/bzr.py
+++ /dev/null
@@ -1,148 +0,0 @@
-"""
-BitBake 'Fetch' implementation for bzr.
-
-"""
-
-# Copyright (C) 2007 Ross Burton
-# Copyright (C) 2007 Richard Purdie
-#
-# Classes for obtaining upstream sources for the
-# BitBake build tools.
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch, FetchError, runfetchcmd, logger
-
-class Bzr(Fetch):
- def supports(self, url, ud, d):
- return ud.type in ['bzr']
-
- def localpath(self, url, ud, d):
-
- # Create paths to bzr checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
-
- revision = Fetch.srcrev_internal_helper(ud, d)
- if revision is True:
- ud.revision = self.latest_revision(url, ud, d)
- elif revision:
- ud.revision = revision
-
- if not ud.revision:
- ud.revision = self.latest_revision(url, ud, d)
-
- ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _buildbzrcommand(self, ud, d, command):
- """
- Build up a bzr command line based on ud
- command is "fetch", "update", "revno"
- """
-
- basecmd = data.expand('${FETCHCMD_bzr}', d)
-
- proto = ud.parm.get('proto', 'http')
-
- bzrroot = ud.host + ud.path
-
- options = []
-
- if command is "revno":
- bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
- else:
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- if command is "fetch":
- bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
- elif command is "update":
- bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid bzr command %s" % command)
-
- return bzrcmd
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
- bzrcmd = self._buildbzrcommand(ud, d, "update")
- logger.debug(1, "BZR Update %s", loc)
- os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
- runfetchcmd(bzrcmd, d)
- else:
- bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
- bzrcmd = self._buildbzrcommand(ud, d, "fetch")
- logger.debug(1, "BZR Checkout %s", loc)
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", bzrcmd)
- runfetchcmd(bzrcmd, d)
-
- os.chdir(ud.pkgdir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
-
- # tar them up to a defined filename
- try:
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d)
- except:
- t, v, tb = sys.exc_info()
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise t, v, tb
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, url, ud, d):
- """
- Return a unique key for the url
- """
- return "bzr:" + ud.pkgdir
-
- def _latest_revision(self, url, ud, d):
- """
- Return the latest upstream revision number
- """
- logger.debug(2, "BZR fetcher hitting network for %s", url)
-
- output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
-
- return output.strip()
-
- def _sortable_revision(self, url, ud, d):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return self._build_revision(url, ud, d)
-
- def _build_revision(self, url, ud, d):
- return ud.revision
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
deleted file mode 100644
index 0edb794b04..0000000000
--- a/bitbake/lib/bb/fetch/cvs.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-#
-
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch, FetchError, MissingParameterError, logger
-
-class Cvs(Fetch):
- """
- Class to fetch a module or modules from cvs repositories
- """
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with cvs.
- """
- return ud.type in ['cvs']
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("cvs method needs a 'module' parameter")
- ud.module = ud.parm["module"]
-
- ud.tag = ud.parm.get('tag', "")
-
- # Override the default date in certain cases
- if 'date' in ud.parm:
- ud.date = ud.parm['date']
- elif ud.tag:
- ud.date = ""
-
- norecurse = ''
- if 'norecurse' in ud.parm:
- norecurse = '_norecurse'
-
- fullpath = ''
- if 'fullpath' in ud.parm:
- fullpath = '_fullpath'
-
- ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def forcefetch(self, url, ud, d):
- if (ud.date == "now"):
- return True
- return False
-
- def go(self, loc, ud, d):
-
- method = ud.parm.get('method', 'pserver')
- localdir = ud.parm.get('localdir', ud.module)
- cvs_port = ud.parm.get('port', '')
-
- cvs_rsh = None
- if method == "ext":
- if "rsh" in ud.parm:
- cvs_rsh = ud.parm["rsh"]
-
- if method == "dir":
- cvsroot = ud.path
- else:
- cvsroot = ":" + method
- cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
- if cvsproxyhost:
- cvsroot += ";proxy=" + cvsproxyhost
- cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
- if cvsproxyport:
- cvsroot += ";proxyport=" + cvsproxyport
- cvsroot += ":" + ud.user
- if ud.pswd:
- cvsroot += ":" + ud.pswd
- cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
-
- options = []
- if 'norecurse' in ud.parm:
- options.append("-l")
- if ud.date:
- # treat YYYYMMDDHHMM specially for CVS
- if len(ud.date) == 12:
- options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
- else:
- options.append("-D \"%s UTC\"" % ud.date)
- if ud.tag:
- options.append("-r %s" % ud.tag)
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- data.setVar('CVSROOT', cvsroot, localdata)
- data.setVar('CVSCOOPTS', " ".join(options), localdata)
- data.setVar('CVSMODULE', ud.module, localdata)
- cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
- cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
-
- if cvs_rsh:
- cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
- cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
- # create module directory
- logger.debug(2, "Fetch: checking for module directory")
- pkg = data.expand('${PN}', d)
- pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
- moddir = os.path.join(pkgdir, localdir)
- if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
- logger.info("Update " + loc)
- # update sources there
- os.chdir(moddir)
- myret = os.system(cvsupdatecmd)
- else:
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(pkgdir)
- os.chdir(pkgdir)
- logger.debug(1, "Running %s", cvscmd)
- myret = os.system(cvscmd)
-
- if myret != 0 or not os.access(moddir, os.R_OK):
- try:
- os.rmdir(moddir)
- except OSError:
- pass
- raise FetchError(ud.module)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude 'CVS'"
-
- # tar them up to a defined filename
- if 'fullpath' in ud.parm:
- os.chdir(pkgdir)
- myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
- else:
- os.chdir(moddir)
- os.chdir('..')
- myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))
-
- if myret != 0:
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise FetchError(ud.module)
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
deleted file mode 100644
index b37a09743e..0000000000
--- a/bitbake/lib/bb/fetch/git.py
+++ /dev/null
@@ -1,339 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' git implementation
-
-"""
-
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-import bb.persist_data
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Git(Fetch):
- """Class to fetch a module or modules from git repositories"""
- def init(self, d):
- #
- # Only enable _sortable revision if the key is set
- #
- if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
- self._sortable_buildindex = self._sortable_buildindex_disabled
-
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with git.
- """
- return ud.type in ['git']
-
- def localpath(self, url, ud, d):
-
- if 'protocol' in ud.parm:
- ud.proto = ud.parm['protocol']
- elif not ud.host:
- ud.proto = 'file'
- else:
- ud.proto = "rsync"
-
- ud.branch = ud.parm.get("branch", "master")
-
- gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
- ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
- ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
-
- tag = Fetch.srcrev_internal_helper(ud, d)
- if tag is True:
- ud.tag = self.latest_revision(url, ud, d)
- elif tag:
- ud.tag = tag
-
- if not ud.tag or ud.tag == "master":
- ud.tag = self.latest_revision(url, ud, d)
-
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- if subdir.endswith("/"):
- subdir = subdir[:-1]
- subdirpath = os.path.join(ud.path, subdir)
- else:
- subdirpath = ud.path
-
- if 'fullclone' in ud.parm:
- ud.localfile = ud.mirrortarball
- else:
- ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)
-
- ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
-
- if 'noclone' in ud.parm:
- ud.localfile = None
- return None
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def forcefetch(self, url, ud, d):
- if 'fullclone' in ud.parm:
- return True
- if 'noclone' in ud.parm:
- return False
- if os.path.exists(ud.localpath):
- return False
- if not self._contains_ref(ud.tag, d):
- return True
- return False
-
- def try_premirror(self, u, ud, d):
- if 'noclone' in ud.parm:
- return False
- if os.path.exists(ud.clonedir):
- return False
- if os.path.exists(ud.localpath):
- return False
-
- return True
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
-
- repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
-
- codir = os.path.join(ud.clonedir, ud.tag)
-
- # If we have no existing clone and no mirror tarball, try and obtain one
- if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
- try:
- # try to obtain a mirror tarball from PREMIRRORS
- mirrors = bb.fetch.mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
- bb.fetch.try_mirrors(d, loc, mirrors)
- except:
- pass
-
- # If the checkout doesn't exist and the mirror tarball does, extract it
- if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
- bb.mkdirhier(ud.clonedir)
- os.chdir(ud.clonedir)
- runfetchcmd("tar -xzf %s" % (repofile), d)
-
- # If the repo still doesn't exist, fallback to cloning it
- if not os.path.exists(ud.clonedir):
- runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
-
- os.chdir(ud.clonedir)
- # Update the checkout if needed
- if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
- # Remove all but the .git directory
- runfetchcmd("rm * -Rf", d)
- if 'fullclone' in ud.parm:
- runfetchcmd("%s fetch --all" % (ud.basecmd), d)
- else:
- runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
- runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
- runfetchcmd("%s prune-packed" % ud.basecmd, d)
- runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
-
- # Generate a mirror tarball if needed
- os.chdir(ud.clonedir)
- mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
- if mirror_tarballs != "0" or 'fullclone' in ud.parm:
- logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
-
- if 'fullclone' in ud.parm:
- return
-
- if os.path.exists(codir):
- bb.utils.prunedir(codir)
-
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- if subdir.endswith("/"):
- subdirbase = os.path.basename(subdir[:-1])
- else:
- subdirbase = os.path.basename(subdir)
- else:
- subdirbase = ""
-
- if subdir != "":
- readpathspec = ":%s" % (subdir)
- codir = os.path.join(codir, "git")
- coprefix = os.path.join(codir, subdirbase, "")
- else:
- readpathspec = ""
- coprefix = os.path.join(codir, "git", "")
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
- os.chdir(coprefix)
- runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
- else:
- bb.mkdirhier(codir)
- os.chdir(ud.clonedir)
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
- runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
-
- os.chdir(codir)
- logger.info("Creating tarball of git checkout")
- runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
-
- os.chdir(ud.clonedir)
- bb.utils.prunedir(codir)
-
- def supports_srcrev(self):
- return True
-
- def _contains_ref(self, tag, d):
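- """
- Return True if the given tag/revision is present in the clone; runs
- "git log" in the current directory, so the caller is expected to
- chdir to ud.clonedir first.
- """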
- basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
- return output.split()[0] != "0"
-
- def _revision_key(self, url, ud, d, branch=False):
- """
- Return a unique key for the url
- """
- key = 'git:' + ud.host + ud.path.replace('/', '.')
- if branch:
- return key + ud.branch
- else:
- return key
-
- def generate_revision_key(self, url, ud, d, branch=False):
- key = self._revision_key(url, ud, d, branch)
- return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
-
- def _latest_revision(self, url, ud, d):
- """
- Compute the HEAD revision for the url
- """
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
-
- basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
- output = runfetchcmd(cmd, d, True)
- if not output:
- raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd))
- return output.split()[0]
-
- def latest_revision(self, url, ud, d):
- """
- Look in the cache for the latest revision, if not present ask the SCM.
- """
- persisted = bb.persist_data.persist(d)
- revs = persisted['BB_URI_HEADREVS']
-
- key = self.generate_revision_key(url, ud, d, branch=True)
- rev = revs[key]
- if rev is None:
- # Compatibility with old key format, no branch included
- oldkey = self.generate_revision_key(url, ud, d, branch=False)
- rev = revs[oldkey]
- if rev is not None:
- del revs[oldkey]
- else:
- rev = self._latest_revision(url, ud, d)
- revs[key] = rev
-
- return str(rev)
-
- def sortable_revision(self, url, ud, d):
- """
-
- """
- pd = bb.persist_data.persist(d)
- localcounts = pd['BB_URI_LOCALCOUNT']
- key = self.generate_revision_key(url, ud, d, branch=True)
- oldkey = self.generate_revision_key(url, ud, d, branch=False)
-
- latest_rev = self._build_revision(url, ud, d)
- last_rev = localcounts[key + '_rev']
- if last_rev is None:
- last_rev = localcounts[oldkey + '_rev']
- if last_rev is not None:
- del localcounts[oldkey + '_rev']
- localcounts[key + '_rev'] = last_rev
-
- uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
- count = None
- if uselocalcount:
- count = Fetch.localcount_internal_helper(ud, d)
- if count is None:
- count = localcounts[key + '_count']
- if count is None:
- count = localcounts[oldkey + '_count']
- if count is not None:
- del localcounts[oldkey + '_count']
- localcounts[key + '_count'] = count
-
- if last_rev == latest_rev:
- return str(count + "+" + latest_rev)
-
- buildindex_provided = hasattr(self, "_sortable_buildindex")
- if buildindex_provided:
- count = self._sortable_buildindex(url, ud, d, latest_rev)
- if count is None:
- count = "0"
- elif uselocalcount or buildindex_provided:
- count = str(count)
- else:
- count = str(int(count) + 1)
-
- localcounts[key + '_rev'] = latest_rev
- localcounts[key + '_count'] = count
-
- return str(count + "+" + latest_rev)
-
- def _build_revision(self, url, ud, d):
- return ud.tag
-
- def _sortable_buildindex_disabled(self, url, ud, d, rev):
- """
- Return a suitable buildindex for the revision specified. This is done by counting revisions
- using "git rev-list" which may or may not work in different circumstances.
- """
-
- cwd = os.getcwd()
-
- # Check if we have the rev already
-
- if not os.path.exists(ud.clonedir):
- print("no repo")
- self.go(None, ud, d)
- if not os.path.exists(ud.clonedir):
- logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
- return None
-
- os.chdir(ud.clonedir)
- if not self._contains_ref(rev, d):
- self.go(None, ud, d)
-
- output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
- os.chdir(cwd)
-
- buildindex = "%s" % output.split()[0]
- logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
- return buildindex
diff --git a/bitbake/lib/bb/fetch/hg.py b/bitbake/lib/bb/fetch/hg.py
deleted file mode 100644
index 3c649a6ad0..0000000000
--- a/bitbake/lib/bb/fetch/hg.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for mercurial DRCS (hg).
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-# Copyright (C) 2007 Robert Schuster
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Hg(Fetch):
- """Class to fetch from mercurial repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with mercurial.
- """
- return ud.type in ['hg']
-
- def forcefetch(self, url, ud, d):
- revTag = ud.parm.get('rev', 'tip')
- return revTag == "tip"
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("hg method needs a 'module' parameter")
-
- ud.module = ud.parm["module"]
-
- # Create paths to mercurial checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- else:
- tag = Fetch.srcrev_internal_helper(ud, d)
- if tag is True:
- ud.revision = self.latest_revision(url, ud, d)
- elif tag:
- ud.revision = tag
- else:
- ud.revision = self.latest_revision(url, ud, d)
-
- ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _buildhgcommand(self, ud, d, command):
- """
- Build up an hg commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_hg}', d)
-
- proto = ud.parm.get('proto', 'http')
-
- host = ud.host
- if proto == "file":
- host = "/"
- ud.host = "localhost"
-
- if not ud.user:
- hgroot = host + ud.path
- else:
- hgroot = ud.user + "@" + host + ud.path
-
- if command is "info":
- return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
-
- options = [];
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- if command is "fetch":
- cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
- elif command is "pull":
- # do not pass options list; limiting pull to rev causes the local
- # repo not to contain it and immediately following "update" command
- # will crash
- cmd = "%s pull" % (basecmd)
- elif command is "update":
- cmd = "%s update -C %s" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid hg command %s" % command)
-
- return cmd
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
- updatecmd = self._buildhgcommand(ud, d, "pull")
- logger.info("Update " + loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
-
- else:
- fetchcmd = self._buildhgcommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", fetchcmd)
- runfetchcmd(fetchcmd, d)
-
- # Even when we clone (fetch), we still need to update as hg's clone
- # won't checkout the specified revision if it's on a branch
- updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.hg' --exclude '.hgrags'"
-
- os.chdir(ud.pkgdir)
- try:
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
- except:
- t, v, tb = sys.exc_info()
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise t, v, tb
-
- def supports_srcrev(self):
- return True
-
- def _latest_revision(self, url, ud, d):
- """
- Compute tip revision for the url
- """
- output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
- return output.strip()
-
- def _build_revision(self, url, ud, d):
- return ud.revision
-
- def _revision_key(self, url, ud, d):
- """
- Return a unique key for the url
- """
- return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
deleted file mode 100644
index 6aa9e45768..0000000000
--- a/bitbake/lib/bb/fetch/local.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import bb
-import bb.utils
-from bb import data
-from bb.fetch import Fetch, logger
-
-class Local(Fetch):
- def supports(self, url, urldata, d):
- """
- Check to see if a given url represents a local fetch.
- """
- return urldata.type in ['file']
-
- def localpath(self, url, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- """
- path = url.split("://")[1]
- path = path.split(";")[0]
- newpath = path
- if path[0] != "/":
- filespath = data.getVar('FILESPATH', d, 1)
- if filespath:
- newpath = bb.utils.which(filespath, path)
- if not newpath:
- filesdir = data.getVar('FILESDIR', d, 1)
- if filesdir:
- newpath = os.path.join(filesdir, path)
- # We don't set localfile as for this fetcher the file is already local!
- return newpath
-
- def go(self, url, urldata, d):
- """Fetch urls (no-op for Local method)"""
- # no need to fetch local files, we'll deal with them in place.
- return 1
-
- def checkstatus(self, url, urldata, d):
- """
- Check the status of the url
- """
- if urldata.localpath.find("*") != -1:
- logger.info("URL %s looks like a glob and was therefore not checked.", url)
- return True
- if os.path.exists(urldata.localpath):
- return True
- return False
diff --git a/bitbake/lib/bb/fetch/osc.py b/bitbake/lib/bb/fetch/osc.py
deleted file mode 100644
index 8e0423d762..0000000000
--- a/bitbake/lib/bb/fetch/osc.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-Bitbake "Fetch" implementation for osc (Opensuse build service client).
-Based on the svn "Fetch" implementation.
-
-"""
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb import utils
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
-from bb.fetch import runfetchcmd
-
-class Osc(Fetch):
- """Class to fetch a module or modules from Opensuse build server
- repositories."""
-
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with osc.
- """
- return ud.type in ['osc']
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("osc method needs a 'module' parameter.")
-
- ud.module = ud.parm["module"]
-
- # Create paths to osc checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
- ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- else:
- pv = data.getVar("PV", d, 0)
- rev = Fetch.srcrev_internal_helper(ud, d)
- if rev and rev != True:
- ud.revision = rev
- else:
- ud.revision = ""
-
- ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _buildosccommand(self, ud, d, command):
- """
- Build up an osc command line based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_osc}', d)
-
- proto = ud.parm.get('proto', 'ocs')
-
- options = []
-
- config = "-c %s" % self.generate_config(ud, d)
-
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- coroot = self._strip_leading_slashes(ud.path)
-
- if command is "fetch":
- osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
- elif command is "update":
- osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
- else:
- raise FetchError("Invalid osc command %s" % command)
-
- return osccmd
-
- def go(self, loc, ud, d):
- """
- Fetch url
- """
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
- oscupdatecmd = self._buildosccommand(ud, d, "update")
- logger.info("Update "+ loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", oscupdatecmd)
- runfetchcmd(oscupdatecmd, d)
- else:
- oscfetchcmd = self._buildosccommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", oscfetchcmd)
- runfetchcmd(oscfetchcmd, d)
-
- os.chdir(ud.pkgdir + ud.path)
- # tar them up to a defined filename
- try:
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
- except:
- t, v, tb = sys.exc_info()
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise t, v, tb
-
- def supports_srcrev(self):
- return False
-
- def generate_config(self, ud, d):
- """
- Generate a .oscrc to be used for this run.
- """
-
- config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
- bb.utils.remove(config_path)
-
- f = open(config_path, 'w')
- f.write("[general]\n")
- f.write("apisrv = %s\n" % ud.host)
- f.write("scheme = http\n")
- f.write("su-wrapper = su -c\n")
- f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
- f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
- f.write("extra-pkgs = gzip\n")
- f.write("\n")
- f.write("[%s]\n" % ud.host)
- f.write("user = %s\n" % ud.parm["user"])
- f.write("pass = %s\n" % ud.parm["pswd"])
- f.close()
-
- return config_path
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
deleted file mode 100644
index 222ed7eaaa..0000000000
--- a/bitbake/lib/bb/fetch/perforce.py
+++ /dev/null
@@ -1,206 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from future_builtins import zip
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import logger
-
-class Perforce(Fetch):
- def supports(self, url, ud, d):
- return ud.type in ['p4']
-
- def doparse(url, d):
- parm = {}
- path = url.split("://")[1]
- delim = path.find("@");
- if delim != -1:
- (user, pswd, host, port) = path.split('@')[0].split(":")
- path = path.split('@')[1]
- else:
- (host, port) = data.getVar('P4PORT', d).split(':')
- user = ""
- pswd = ""
-
- if path.find(";") != -1:
- keys = []
- values = []
- plist = path.split(';')
- for item in plist:
- if item.count('='):
- (key, value) = item.split('=')
- keys.append(key)
- values.append(value)
-
- parm = dict(zip(keys, values))
- path = "//" + path.split(';')[0]
- host += ":%s" % (port)
- parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
-
- return host, path, user, pswd, parm
- doparse = staticmethod(doparse)
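-
- # A hypothetical example of the URL form doparse() understands:
- #
- #   doparse("p4://user:pass:p4server:1666@depot/proj/...;cset=100", d)
- #   returns ('p4server:1666', '//depot/proj/...', 'user', 'pass',
- #   {'cset': '100'})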
-
- def getcset(d, depot, host, user, pswd, parm):
- p4opt = ""
- if "cset" in parm:
- return parm["cset"];
- if user:
- p4opt += " -u %s" % (user)
- if pswd:
- p4opt += " -P %s" % (pswd)
- if host:
- p4opt += " -p %s" % (host)
-
- p4date = data.getVar("P4DATE", d, 1)
- if "revision" in parm:
- depot += "#%s" % (parm["revision"])
- elif "label" in parm:
- depot += "@%s" % (parm["label"])
- elif p4date:
- depot += "@%s" % (p4date)
-
- p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
- logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
- p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
- cset = p4file.readline().strip()
- logger.debug(1, "READ %s", cset)
- if not cset:
- return -1
-
- return cset.split(' ')[1]
- getcset = staticmethod(getcset)
-
- def localpath(self, url, ud, d):
-
- (host, path, user, pswd, parm) = Perforce.doparse(url, d)
-
- # If a label is specified, we use that as our filename
-
- if "label" in parm:
- ud.localfile = "%s.tar.gz" % (parm["label"])
- return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
-
- base = path
- which = path.find('/...')
- if which != -1:
- base = path[:which]
-
- base = self._strip_leading_slashes(base)
-
- cset = Perforce.getcset(d, path, host, user, pswd, parm)
-
- ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
-
- return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
-
- def go(self, loc, ud, d):
- """
- Fetch urls
- """
-
- (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
-
- if depot.find('/...') != -1:
- path = depot[:depot.find('/...')]
- else:
- path = depot
-
- module = parm.get('module', os.path.basename(path))
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- # Get the p4 command
- p4opt = ""
- if user:
- p4opt += " -u %s" % (user)
-
- if pswd:
- p4opt += " -P %s" % (pswd)
-
- if host:
- p4opt += " -p %s" % (host)
-
- p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
-
- # create temp directory
- logger.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
- raise FetchError(module)
-
- if "label" in parm:
- depot = "%s@%s" % (depot, parm["label"])
- else:
- cset = Perforce.getcset(d, depot, host, user, pswd, parm)
- depot = "%s@%s" % (depot, cset)
-
- os.chdir(tmpfile)
- logger.info("Fetch " + loc)
- logger.info("%s%s files %s", p4cmd, p4opt, depot)
- p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
-
- if not p4file:
- logger.error("Fetch: unable to get the P4 files from %s", depot)
- raise FetchError(module)
-
- count = 0
-
- for file in p4file:
- list = file.split()
-
- if list[2] == "delete":
- continue
-
- dest = list[0][len(path)+1:]
- where = dest.find("#")
-
- os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
- count = count + 1
-
- if count == 0:
- logger.error("Fetch: No files gathered from the P4 fetch")
- raise FetchError(module)
-
- myret = os.system("tar -czf %s %s" % (ud.localpath, module))
- if myret != 0:
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise FetchError(module)
- # cleanup
- bb.utils.prunedir(tmpfile)
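
Worth noting how doparse() above tokenises its URLs: the credentials and endpoint arrive as a single colon-separated block before the '@', and ';key=value' pairs follow the depot path. A standalone sketch of that split, with a hypothetical host and credentials and no BitBake data store:

    def parse_p4_url(url):
        # Mirrors the split order above: p4://user:pswd:host:port@depot/path;key=value
        path = url.split("://", 1)[1]
        if "@" in path:
            user, pswd, host, port = path.split("@", 1)[0].split(":")
            path = path.split("@", 1)[1]
        else:
            host, port = "p4.example.com", "1666"   # stand-in for P4PORT
            user = pswd = ""
        parm = {}
        if ";" in path:
            parts = path.split(";")
            path = parts[0]
            for item in parts[1:]:
                if "=" in item:
                    key, value = item.split("=", 1)
                    parm[key] = value
        return host + ":" + port, "//" + path, user, pswd, parm

    print(parse_p4_url("p4://alice:s3cret:p4.example.com:1666@depot/proj;label=rel1"))
    # ('p4.example.com:1666', '//depot/proj', 'alice', 's3cret', {'label': 'rel1'})
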
diff --git a/bitbake/lib/bb/fetch/repo.py b/bitbake/lib/bb/fetch/repo.py
deleted file mode 100644
index 03642e7a0d..0000000000
--- a/bitbake/lib/bb/fetch/repo.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake "Fetch" repo (git) implementation
-
-"""
-
-# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
-#
-# Based on git.py which is:
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Repo(Fetch):
- """Class to fetch a module or modules from repo (git) repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with repo.
- """
- return ud.type in ["repo"]
-
- def localpath(self, url, ud, d):
- """
- We don"t care about the git rev of the manifests repository, but
- we do care about the manifest to use. The default is "default".
- We also care about the branch or tag to be used. The default is
- "master".
- """
-
- ud.proto = ud.parm.get('protocol', 'git')
- ud.branch = ud.parm.get('branch', 'master')
- ud.manifest = ud.parm.get('manifest', 'default.xml')
- if not ud.manifest.endswith('.xml'):
- ud.manifest += '.xml'
-
- ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
- logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
- return
-
- gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
- repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
- codir = os.path.join(repodir, gitsrcname, ud.manifest)
-
- if ud.user:
- username = ud.user + "@"
- else:
- username = ""
-
- bb.mkdirhier(os.path.join(codir, "repo"))
- os.chdir(os.path.join(codir, "repo"))
- if not os.path.exists(os.path.join(codir, "repo", ".repo")):
- runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
-
- runfetchcmd("repo sync", d)
- os.chdir(codir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.repo' --exclude '.git'"
-
- # Create a cache
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
-
- def supports_srcrev(self):
- return False
-
- def _build_revision(self, url, ud, d):
- return ud.manifest
-
- def _want_sortable_revision(self, url, ud, d):
- return False
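
Repo.go() above boils down to two shell commands assembled from URL parameters. A sketch of that assembly, using a hypothetical manifest URL:

    # Hypothetical SRC_URI: repo://android.example.com/platform/manifest;branch=froyo
    proto, branch, manifest = "git", "froyo", "default.xml"
    user, host, path = "", "android.example.com", "/platform/manifest"
    username = user + "@" if user else ""
    init_cmd = "repo init -m %s -b %s -u %s://%s%s%s" % (
        manifest, branch, proto, username, host, path)
    print(init_cmd)
    # repo init -m default.xml -b froyo -u git://android.example.com/platform/manifest
    print("repo sync")
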
diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py
deleted file mode 100644
index 86c76f4e44..0000000000
--- a/bitbake/lib/bb/fetch/ssh.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-'''
-BitBake 'Fetch' implementations
-
-This implementation is for Secure Shell (SSH), and attempts to comply with the
-IETF secsh internet draft:
- http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
-
- Currently does not support the sftp parameters, as this uses scp
- Also does not support the 'fingerprint' connection parameter.
-
-'''
-
-# Copyright (C) 2006 OpenedHand Ltd.
-#
-#
-# Based in part on svk.py:
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Based on svn.py:
-# Copyright (C) 2003, 2004 Chris Larson
-# Based on functions from the base bb module:
-# Copyright 2003 Holger Schurig
-#
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, os
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-
-
-__pattern__ = re.compile(r'''
- \s* # Skip leading whitespace
- ssh:// # scheme
- ( # Optional username/password block
- (?P<user>\S+) # username
- (:(?P<pass>\S+))? # colon followed by the password (optional)
- )?
- (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
- @
- (?P<host>\S+?) # non-greedy match of the host
- (:(?P<port>[0-9]+))? # colon followed by the port (optional)
- /
- (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
- # and may include the use of '~' to reference the remote home
- # directory
- (?P<sparam>(;[^;]+)*)? # parameters block (optional)
- $
-''', re.VERBOSE)
-
-class SSH(Fetch):
- '''Class to fetch a module or modules via Secure Shell'''
-
- def supports(self, url, urldata, d):
- return __pattern__.match(url) is not None
-
- def localpath(self, url, urldata, d):
- m = __pattern__.match(url)
- path = m.group('path')
- host = m.group('host')
- lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
- return lpath
-
- def go(self, url, urldata, d):
- dldir = data.getVar('DL_DIR', d, 1)
-
- m = __pattern__.match(url)
- path = m.group('path')
- host = m.group('host')
- port = m.group('port')
- user = m.group('user')
- password = m.group('pass')
-
- ldir = os.path.join(dldir, host)
- lpath = os.path.join(ldir, os.path.basename(path))
-
- if not os.path.exists(ldir):
- os.makedirs(ldir)
-
- if port:
- port = '-P %s' % port
- else:
- port = ''
-
- if user:
- fr = user
- if password:
- fr += ':%s' % password
- fr += '@%s' % host
- else:
- fr = host
- fr += ':%s' % path
-
-
- import commands
- cmd = 'scp -B -r %s %s %s/' % (
- port,
- commands.mkarg(fr),
- commands.mkarg(ldir)
- )
-
- (exitstatus, output) = commands.getstatusoutput(cmd)
- if exitstatus != 0:
- print(output)
- raise FetchError('Unable to fetch %s' % url)
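
Everything the scp invocation above needs comes from the groups extracted by __pattern__. A cut-down version of that regex (simplified: the connection/suffix parameter blocks are dropped and the '@' block is made optional) shows the decomposition:

    import re

    # Simplified form of the __pattern__ above.
    pattern = re.compile(r'''
        ssh://
        ((?P<user>[^:@]+)(:(?P<pass>[^@]+))?@)?   # optional user[:password]@
        (?P<host>[^:/]+)                          # host
        (:(?P<port>[0-9]+))?                      # optional port
        /(?P<path>.+)                             # remote path
    ''', re.VERBOSE)

    m = pattern.match("ssh://alice:secret@build.example.com:2222/srv/tarballs/foo.tgz")
    print(m.group("user"), m.group("host"), m.group("port"), m.group("path"))
    # alice build.example.com 2222 srv/tarballs/foo.tgz
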
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py
deleted file mode 100644
index 595a9da255..0000000000
--- a/bitbake/lib/bb/fetch/svk.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-This implementation is for svk. It is based on the svn implementation
-
-"""
-
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
-from bb.fetch import logger
-
-class Svk(Fetch):
- """Class to fetch a module or modules from svk repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with svk.
- """
- return ud.type in ['svk']
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("svk method needs a 'module' parameter")
- else:
- ud.module = ud.parm["module"]
-
- ud.revision = ud.parm.get('rev', "")
-
- ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def forcefetch(self, url, ud, d):
- return ud.date == "now"
-
- def go(self, loc, ud, d):
- """Fetch urls"""
-
- svkroot = ud.host + ud.path
-
- svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
-
- if ud.revision:
- svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
-
- # create temp directory
- localdata = data.createCopy(d)
- data.update_data(localdata)
- logger.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
- raise FetchError(ud.module)
-
- # check out sources there
- os.chdir(tmpfile)
- logger.info("Fetch " + loc)
- logger.debug(1, "Running %s", svkcmd)
- myret = os.system(svkcmd)
- if myret != 0:
- try:
- os.rmdir(tmpfile)
- except OSError:
- pass
- raise FetchError(ud.module)
-
- os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
- # tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
- if myret != 0:
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise FetchError(ud.module)
- # cleanup
- bb.utils.prunedir(tmpfile)
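
Like the perforce fetcher, Svk.go() above creates its scratch directory by piping MKTEMPDIRCMD through os.popen, which keeps the command metadata-configurable. For comparison only, the same flow with Python's tempfile module (a modern equivalent, not what the code above does):

    import os, tempfile, shutil

    oldcwd = os.getcwd()
    workdir = tempfile.mkdtemp(prefix="oesvk.")   # stands in for ${WORKDIR}/oesvk.XXXXXX
    try:
        os.chdir(workdir)
        # ... run "svk co ..." here, then tar the checkout up ...
    finally:
        os.chdir(oldcwd)
        shutil.rmtree(workdir)                    # what bb.utils.prunedir(tmpfile) does above
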
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
deleted file mode 100644
index 8f053abf74..0000000000
--- a/bitbake/lib/bb/fetch/svn.py
+++ /dev/null
@@ -1,204 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for svn.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Svn(Fetch):
- """Class to fetch a module or modules from svn repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with svn.
- """
- return ud.type in ['svn']
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("svn method needs a 'module' parameter")
-
- ud.module = ud.parm["module"]
-
- # Create paths to svn checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- if 'rev' in ud.parm:
- ud.date = ""
- ud.revision = ud.parm['rev']
- elif 'date' in ud.parm:
- ud.date = ud.parm['date']
- ud.revision = ""
- else:
- #
- # ***Nasty hack***
- # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
- # Should warn people to switch to SRCREV here
- #
- pv = data.getVar("PV", d, 0)
- if "DATE" in pv:
- ud.revision = ""
- else:
- rev = Fetch.srcrev_internal_helper(ud, d)
- if rev is True:
- ud.revision = self.latest_revision(url, ud, d)
- ud.date = ""
- elif rev:
- ud.revision = rev
- ud.date = ""
- else:
- ud.revision = ""
-
- ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _buildsvncommand(self, ud, d, command):
- """
- Build up an svn commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_svn}', d)
-
- proto = ud.parm.get('proto', 'svn')
-
- svn_rsh = None
- if proto == "svn+ssh" and "rsh" in ud.parm:
- svn_rsh = ud.parm["rsh"]
-
- svnroot = ud.host + ud.path
-
- # either use the revision, or SRCDATE in braces,
- options = []
-
- if ud.user:
- options.append("--username %s" % ud.user)
-
- if ud.pswd:
- options.append("--password %s" % ud.pswd)
-
- if command is "info":
- svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
- else:
- suffix = ""
- if ud.revision:
- options.append("-r %s" % ud.revision)
- suffix = "@%s" % (ud.revision)
- elif ud.date:
- options.append("-r {%s}" % ud.date)
-
- if command is "fetch":
- svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
- elif command is "update":
- svncmd = "%s update %s" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid svn command %s" % command)
-
- if svn_rsh:
- svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
- return svncmd
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
- svnupdatecmd = self._buildsvncommand(ud, d, "update")
- logger.info("Update " + loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", svnupdatecmd)
- runfetchcmd(svnupdatecmd, d)
- else:
- svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", svnfetchcmd)
- runfetchcmd(svnfetchcmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.svn'"
-
- os.chdir(ud.pkgdir)
- # tar them up to a defined filename
- try:
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
- except:
- t, v, tb = sys.exc_info()
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise t, v, tb
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, url, ud, d):
- """
- Return a unique key for the url
- """
- return "svn:" + ud.moddir
-
- def _latest_revision(self, url, ud, d):
- """
- Return the latest upstream revision number
- """
- logger.debug(2, "SVN fetcher hitting network for %s", url)
-
- output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
-
- revision = None
- for line in output.splitlines():
- if "Last Changed Rev" in line:
- revision = line.split(":")[1].strip()
-
- return revision
-
- def _sortable_revision(self, url, ud, d):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return self._build_revision(url, ud, d)
-
- def _build_revision(self, url, ud, d):
- return ud.revision
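
Concretely, for a hypothetical SRC_URI like svn://svn.example.com/trunk;module=mylib;rev=1234, the "fetch" branch of _buildsvncommand() above produces a command of this shape (assuming FETCHCMD_svn expands to plain "svn"):

    basecmd, proto = "svn", "svn"
    host, path, module, rev = "svn.example.com", "/trunk", "mylib", "1234"
    options = ["-r %s" % rev]
    svnroot = host + path
    suffix = "@%s" % rev
    svncmd = "%s co %s %s://%s/%s%s %s" % (
        basecmd, " ".join(options), proto, svnroot, module, suffix, module)
    print(svncmd)
    # svn co -r 1234 svn://svn.example.com/trunk/mylib@1234 mylib
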
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
deleted file mode 100644
index 4d4bdfd493..0000000000
--- a/bitbake/lib/bb/fetch/wget.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import logging
-import bb
-import urllib
-from bb import data
-from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd
-
-class Wget(Fetch):
- """Class to fetch urls via 'wget'"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with wget.
- """
- return ud.type in ['http', 'https', 'ftp']
-
- def localpath(self, url, ud, d):
-
- url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
- ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def go(self, uri, ud, d, checkonly = False):
- """Fetch urls"""
-
- def fetch_uri(uri, ud, d):
- if checkonly:
- fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
- elif os.path.exists(ud.localpath):
- # file exists, but we didn't complete it, so try to resume
- fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
- else:
- fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
-
- uri = uri.split(";")[0]
- uri_decoded = list(decodeurl(uri))
- uri_type = uri_decoded[0]
- uri_host = uri_decoded[1]
-
- fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
- fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
- logger.info("fetch " + uri)
- logger.debug(2, "executing " + fetchcmd)
- runfetchcmd(fetchcmd, d)
-
- # Sanity check since wget can pretend it succeeded when it didn't
- # Also, this used to happen if sourceforge sent us to the mirror page
- if not os.path.exists(ud.localpath) and not checkonly:
- logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
- return False
-
- return True
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- if fetch_uri(uri, ud, localdata):
- return True
-
- raise FetchError(uri)
-
-
- def checkstatus(self, uri, ud, d):
- return self.go(uri, ud, d, True)
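
The wget fetcher never constructs a command line itself; it only substitutes ${URI} and ${FILE} into whichever of FETCHCOMMAND/RESUMECOMMAND/CHECKCOMMAND applies. A sketch with a hypothetical FETCHCOMMAND value:

    fetchcmd = "wget -t 2 -T 30 --passive-ftp -O ${FILE} ${URI}"   # hypothetical metadata value
    uri = "http://downloads.example.com/foo-1.0.tar.gz;name=foo".split(";")[0]
    basename = uri.rsplit("/", 1)[1]
    fetchcmd = fetchcmd.replace("${URI}", uri).replace("${FILE}", basename)
    print(fetchcmd)
    # wget -t 2 -T 30 --passive-ftp -O foo-1.0.tar.gz http://downloads.example.com/foo-1.0.tar.gz
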
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
deleted file mode 100644
index 4e03fc9884..0000000000
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ /dev/null
@@ -1,1074 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from __future__ import absolute_import
-from __future__ import print_function
-import os, re
-import logging
-import bb
-from bb import data
-from bb import persist_data
-from bb import utils
-
-__version__ = "2"
-
-logger = logging.getLogger("BitBake.Fetcher")
-
-class BBFetchException(Exception):
- """Class all fetch exceptions inherit from"""
- def __init__(self, message):
- self.msg = message
- Exception.__init__(self, message)
-
- def __str__(self):
- return self.msg
-
-class MalformedUrl(BBFetchException):
- """Exception raised when encountering an invalid url"""
- def __init__(self, url):
- msg = "The URL: '%s' is invalid and cannot be interpreted" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = url
-
-class FetchError(BBFetchException):
- """General fetcher exception when something happens incorrectly"""
- def __init__(self, message, url = None):
- msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class UnpackError(BBFetchException):
- """General fetcher exception when something happens incorrectly when unpacking"""
- def __init__(self, message, url):
- msg = "Unpack failure for URL: '%s'. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class NoMethodError(BBFetchException):
- """Exception raised when there is no method to obtain a supplied url or set of urls"""
- def __init__(self, url):
- msg = "Could not find a fetcher which supports the URL: '%s'" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = url
-
-class MissingParameterError(BBFetchException):
- """Exception raised when a fetch method is missing a critical parameter in the url"""
- def __init__(self, missing, url):
- msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
- self.url = url
- self.missing = missing
- BBFetchException.__init__(self, msg)
- self.args = (missing, url)
-
-class ParameterError(BBFetchException):
- """Exception raised when a url cannot be proccessed due to invalid parameters."""
- def __init__(self, message, url):
- msg = "URL: '%s' has invalid parameters. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class MD5SumError(BBFetchException):
- """Exception raised when a MD5 checksum of a file does not match for a downloaded file"""
- def __init__(self, path, wanted, got, url):
- msg = "File: '%s' has md5 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
- self.url = url
- self.path = path
- self.wanted = wanted
- self.got = got
- BBFetchException.__init__(self, msg)
- self.args = (path, wanted, got, url)
-
-class SHA256SumError(MD5SumError):
- """Exception raised when a SHA256 checksum of a file does not match for a downloaded file"""
- def __init__(self, path, wanted, got, url):
- msg = "File: '%s' has sha256 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
- self.url = url
- self.path = path
- self.wanted = wanted
- self.got = got
- BBFetchException.__init__(self, msg)
- self.args = (path, wanted, got, url)
-
-class NetworkAccess(BBFetchException):
- """Exception raised when network access is disabled but it is required."""
- def __init__(self, url, cmd):
- msg = "Network access disabled through BB_NO_NETWORK but access rquested with command %s (for url %s)" % (cmd, url)
- self.url = url
- self.cmd = cmd
- BBFetchException.__init__(self, msg)
- self.args = (url, cmd)
-
-
-def decodeurl(url):
- """Decodes an URL into the tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
- if not m:
- raise MalformedUrl(url)
-
- type = m.group('type')
- location = m.group('location')
- if not location:
- raise MalformedUrl(url)
- user = m.group('user')
- parm = m.group('parm')
-
- locidx = location.find('/')
- if locidx != -1 and type.lower() != 'file':
- host = location[:locidx]
- path = location[locidx:]
- else:
- host = ""
- path = location
- if user:
- m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
- if m:
- user = m.group('user')
- pswd = m.group('pswd')
- else:
- user = ''
- pswd = ''
-
- p = {}
- if parm:
- for s in parm.split(';'):
- s1, s2 = s.split('=')
- p[s1] = s2
-
- return (type, host, path, user, pswd, p)
-
-def encodeurl(decoded):
- """Encodes a URL from tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- (type, host, path, user, pswd, p) = decoded
-
- if not path:
- raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
- if not type:
- raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
- url = '%s://' % type
- if user and type != "file":
- url += "%s" % user
- if pswd:
- url += ":%s" % pswd
- url += "@"
- if host and type != "file":
- url += "%s" % host
- url += "%s" % path
- if p:
- for parm in p:
- url += ";%s=%s" % (parm, p[parm])
-
- return url
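
decodeurl() and encodeurl() above are inverses over the (scheme, host, path, user, password, params) tuple. A miniature re-implementation of the encode side shows the token layout (sorted() is added only to make the parameter order deterministic; the real code iterates the dict directly):

    decoded = ("git", "git.example.com", "/repo.git", "", "",
               {"branch": "stable", "protocol": "http"})

    def encode(decoded):
        type_, host, path, user, pswd, p = decoded
        url = "%s://" % type_
        if user and type_ != "file":
            url += user + ((":" + pswd) if pswd else "") + "@"
        if host and type_ != "file":
            url += host
        url += path
        for parm in sorted(p):
            url += ";%s=%s" % (parm, p[parm])
        return url

    print(encode(decoded))
    # git://git.example.com/repo.git;branch=stable;protocol=http
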
-
-def uri_replace(ud, uri_find, uri_replace, d):
- if not ud.url or not uri_find or not uri_replace:
- logger.debug(1, "uri_replace: passed an undefined value, not replacing")
- return ud.url
- uri_decoded = list(decodeurl(ud.url))
- uri_find_decoded = list(decodeurl(uri_find))
- uri_replace_decoded = list(decodeurl(uri_replace))
- result_decoded = ['', '', '', '', '', {}]
- for i in uri_find_decoded:
- loc = uri_find_decoded.index(i)
- result_decoded[loc] = uri_decoded[loc]
- if isinstance(i, basestring):
- if (re.match(i, uri_decoded[loc])):
- result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
- if uri_find_decoded.index(i) == 2:
- if ud.mirrortarball:
- result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.mirrortarball))
- elif ud.localpath:
- result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.localpath))
- else:
- return ud.url
- return encodeurl(result_decoded)
-
-methods = []
-urldata_cache = {}
-saved_headrevs = {}
-
-def fetcher_init(d):
- """
- Called to initialize the fetchers once the configuration data is known.
- Calls before this must not hit the cache.
- """
- pd = persist_data.persist(d)
- # When to drop SCM head revisions controlled by user policy
- srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
- if srcrev_policy == "cache":
- logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
- elif srcrev_policy == "clear":
- logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
- try:
- bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items()
- except:
- pass
- del pd['BB_URI_HEADREVS']
- else:
- raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
-
- for m in methods:
- if hasattr(m, "init"):
- m.init(d)
-
-def fetcher_compare_revisions(d):
- """
- Compare the revisions in the persistent cache with current values and
- return true/false on whether they've changed.
- """
-
- pd = persist_data.persist(d)
- data = dict(pd['BB_URI_HEADREVS'].items())
- data2 = dict(bb.fetch2.saved_headrevs)
-
- for key in data:
- if key not in data2 or data2[key] != data[key]:
- logger.debug(1, "%s changed", key)
- return True
- logger.debug(2, "%s did not change", key)
- return False
-
-def mirror_from_string(data):
- return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
-
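
mirror_from_string() unpacks the flattened PREMIRRORS/MIRRORS text, where literal '\n' sequences separate entries and whitespace separates each (find, replace) pair. For example (hypothetical mirror host):

    def mirror_from_string(data):
        # Same one-liner as above.
        return [i.split() for i in (data or "").replace('\\n', '\n').split('\n') if i]

    premirrors = "git://.*/.* http://mirror.example.com/sources/ \\n ftp://.*/.* http://mirror.example.com/sources/"
    print(mirror_from_string(premirrors))
    # [['git://.*/.*', 'http://mirror.example.com/sources/'],
    #  ['ftp://.*/.*', 'http://mirror.example.com/sources/']]
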
-def verify_checksum(u, ud, d):
- """
- verify the MD5 and SHA256 checksum for downloaded src
-
- return value:
- - True: checksum matched
- - False: checksum unmatched
-
- if checksum is missing in recipes file, "BB_STRICT_CHECKSUM" decide the return value.
- if BB_STRICT_CHECKSUM = "1" then return false as unmatched, otherwise return true as
- matched
- """
-
- if not ud.type in ["http", "https", "ftp", "ftps"]:
- return
-
- md5data = bb.utils.md5_file(ud.localpath)
- sha256data = bb.utils.sha256_file(ud.localpath)
-
- if ud.md5_expected is None or ud.sha256_expected is None:
- logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
- ud.localpath, ud.md5_name, md5data,
- ud.sha256_name, sha256data)
- if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
- raise FetchError("No checksum specified for %s." % u, u)
- return
-
- if ud.md5_expected != md5data:
- raise MD5SumError(ud.localpath, ud.md5_expected, md5data, u)
-
- if ud.sha256_expected != sha256data:
- raise SHA256SumError(ud.localpath, ud.sha256_expected, sha256data, u)
-
-def subprocess_setup():
- import signal
- # Python installs a SIGPIPE handler by default. This is usually not what
- # non-Python subprocesses expect.
- # SIGPIPE errors are known issues with gzip/bash
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-
-def get_autorev(d):
- # Don't cache the recipe in the autorev case unless the SRCREV policy is "cache"
- if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
- bb.data.setVar('__BB_DONT_CACHE', '1', d)
- return "AUTOINC"
-
-def get_srcrev(d):
- """
- Return the version string for the current package
- (usually to be used as PV)
- Most packages usually only have one SCM so we just pass on the call.
- In the multi SCM case, we build a value based on SRCREV_FORMAT which must
- have been set.
- """
-
- scms = []
- fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
- urldata = fetcher.ud
- for u in urldata:
- if urldata[u].method.supports_srcrev():
- scms.append(u)
-
- if len(scms) == 0:
- raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
-
- if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
- return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
-
- #
- # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
- #
- format = bb.data.getVar('SRCREV_FORMAT', d, True)
- if not format:
- raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
-
- for scm in scms:
- ud = urldata[scm]
- for name in ud.names:
- rev = ud.method.sortable_revision(scm, ud, d, name)
- format = format.replace(name, rev)
-
- return format
-
-def localpath(url, d):
- fetcher = bb.fetch2.Fetch([url], d)
- return fetcher.localpath(url)
-
-def runfetchcmd(cmd, d, quiet = False, cleanup = []):
- """
- Run cmd returning the command output
- Raise an error if interrupted or cmd fails
- Optionally echo command output to stdout
- Optionally remove the files/directories listed in cleanup upon failure
- """
-
- # Need to export PATH as binary could be in metadata paths
- # rather than host provided
- # Also include some other variables.
- # FIXME: Should really include all export variables?
- exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
- 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
- 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
- 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']
-
- for var in exportvars:
- val = data.getVar(var, d, True)
- if val:
- cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
-
- logger.debug(1, "Running %s", cmd)
-
- # redirect stderr to stdout
- stdout_handle = os.popen(cmd + " 2>&1", "r")
- output = ""
-
- while True:
- line = stdout_handle.readline()
- if not line:
- break
- if not quiet:
- print(line, end=' ')
- output += line
-
- status = stdout_handle.close() or 0
- signal = status >> 8
- exitstatus = status & 0xff
-
- if (signal or status != 0):
- for f in cleanup:
- try:
- bb.utils.remove(f, True)
- except OSError:
- pass
-
- if signal:
- raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
- elif exitstatus != 0:
- raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))
-
- return output
-
-def check_network_access(d, info = "", url = None):
- """
- log remote network access, and error if BB_NO_NETWORK is set
- """
- if bb.data.getVar("BB_NO_NETWORK", d, True) == "1":
- raise NetworkAccess(url, info)
- else:
- logger.debug(1, "Fetcher accessed the network with the command %s" % info)
-
-def try_mirrors(d, origud, mirrors, check = False):
- """
- Try to use a mirrored version of the sources.
- This method will be automatically called before the fetchers go.
-
- d is a bb.data instance
- origud is the FetchData of the original uri we're trying to download
- mirrors is the list of mirrors we're going to try
- """
- ld = d.createCopy()
- for line in mirrors:
- try:
- (find, replace) = line
- except ValueError:
- continue
- newuri = uri_replace(origud, find, replace, ld)
- if newuri == origud.url:
- continue
- try:
- ud = FetchData(newuri, ld)
- ud.setup_localpath(ld)
-
- if check:
- found = ud.method.checkstatus(newuri, ud, ld)
- if found:
- return found
- continue
-
- if ud.method.need_update(newuri, ud, ld):
- ud.method.download(newuri, ud, ld)
- if hasattr(ud.method,"build_mirror_data"):
- ud.method.build_mirror_data(newuri, ud, ld)
-
- if not ud.localpath or not os.path.exists(ud.localpath):
- continue
-
- if ud.localpath == origud.localpath:
- return ud.localpath
-
- # We may be obtaining a mirror tarball which needs further processing by the real fetcher
- # If that tarball is a local file:// we need to provide a symlink to it
- dldir = ld.getVar("DL_DIR", True)
- if os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
- dest = os.path.join(dldir, os.path.basename(ud.localpath))
- if not os.path.exists(dest):
- os.symlink(ud.localpath, dest)
- return None
- # Otherwise the result is a local file:// and we symlink to it
- if not os.path.exists(origud.localpath):
- os.symlink(ud.localpath, origud.localpath)
- return ud.localpath
-
- except bb.fetch2.NetworkAccess:
- raise
-
- except bb.fetch2.BBFetchException as e:
- logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
- logger.debug(1, str(e))
- try:
- if os.path.isfile(ud.localpath):
- bb.utils.remove(ud.localpath)
- except UnboundLocalError:
- pass
- continue
- return None
-
-def srcrev_internal_helper(ud, d, name):
- """
- Return:
- a) a source revision if specified
- b) latest revision if SRCREV="AUTOINC"
- c) None if not specified
- """
-
- if 'rev' in ud.parm:
- return ud.parm['rev']
-
- if 'tag' in ud.parm:
- return ud.parm['tag']
-
- rev = None
- pn = data.getVar("PN", d, True)
- if name != '':
- rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
- if not rev:
- rev = data.getVar("SRCREV_%s" % name, d, True)
- if not rev:
- rev = data.getVar("SRCREV_pn-%s" % pn, d, True)
- if not rev:
- rev = data.getVar("SRCREV", d, True)
- if rev == "INVALID":
- raise FetchError("Please set SRCREV to a valid value", ud.url)
- if rev == "AUTOINC":
- rev = ud.method.latest_revision(ud.url, ud, d, name)
-
- return rev
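
The lookup chain in srcrev_internal_helper() gives SRCREV its precedence rules: rev/tag URL parameters win, then the most specific variable down to plain SRCREV. Stubbed against a plain dict in place of the datastore, for a hypothetical package "foo" with url name "meta":

    # Precedence sketch: SRCREV_<name>_pn-<pn>, SRCREV_<name>, SRCREV_pn-<pn>, SRCREV.
    store = {"SRCREV_meta_pn-foo": "deadbeef", "SRCREV": "AUTOINC"}

    def getVar(name):
        return store.get(name)

    pn, name = "foo", "meta"
    rev = (getVar("SRCREV_%s_pn-%s" % (name, pn))
           or getVar("SRCREV_%s" % name)
           or getVar("SRCREV_pn-%s" % pn)
           or getVar("SRCREV"))
    print(rev)   # deadbeef
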
-
-class FetchData(object):
- """
- A class which represents the fetcher state for a given URI.
- """
- def __init__(self, url, d):
- # localpath is the location of a downloaded result. If not set, the file is local.
- self.donestamp = None
- self.localfile = ""
- self.localpath = None
- self.lockfile = None
- self.mirrortarball = None
- (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
- self.date = self.getSRCDate(d)
- self.url = url
- if not self.user and "user" in self.parm:
- self.user = self.parm["user"]
- if not self.pswd and "pswd" in self.parm:
- self.pswd = self.parm["pswd"]
- self.setup = False
-
- if "name" in self.parm:
- self.md5_name = "%s.md5sum" % self.parm["name"]
- self.sha256_name = "%s.sha256sum" % self.parm["name"]
- else:
- self.md5_name = "md5sum"
- self.sha256_name = "sha256sum"
- self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
- self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
-
- self.names = self.parm.get("name",'default').split(',')
-
- self.method = None
- for m in methods:
- if m.supports(url, self, d):
- self.method = m
- break
-
- if not self.method:
- raise NoMethodError(url)
-
- if self.method.supports_srcrev():
- self.revisions = {}
- for name in self.names:
- self.revisions[name] = srcrev_internal_helper(self, d, name)
-
- # Compatibility code for the case where no name is specified
- if len(self.names) == 1:
- self.revision = self.revisions[self.names[0]]
-
- if hasattr(self.method, "urldata_init"):
- self.method.urldata_init(self, d)
-
- if "localpath" in self.parm:
- # if user sets localpath for file, use it instead.
- self.localpath = self.parm["localpath"]
- self.basename = os.path.basename(self.localpath)
- elif self.localfile:
- self.localpath = self.method.localpath(self.url, self, d)
-
- if self.localfile and self.localpath:
- # Note: These files should always be in DL_DIR whereas localpath may not be.
- basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
- self.donestamp = basepath + '.done'
- self.lockfile = basepath + '.lock'
-
- def setup_localpath(self, d):
- if not self.localpath:
- self.localpath = self.method.localpath(self.url, self, d)
-
- def getSRCDate(self, d):
- """
- Return the SRC Date for the component
-
- d the bb.data module
- """
- if "srcdate" in self.parm:
- return self.parm['srcdate']
-
- pn = data.getVar("PN", d, True)
-
- if pn:
- return data.getVar("SRCDATE_%s" % pn, d, True) or data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True)
-
- return data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True)
-
-class FetchMethod(object):
- """Base class for 'fetch'ing data"""
-
- def __init__(self, urls = None):
- self.urls = urls or []
-
- def supports(self, url, urldata, d):
- """
- Check to see if this fetch class supports a given url.
- """
- return 0
-
- def localpath(self, url, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- Can also setup variables in urldata for use in go (saving code duplication
- and duplicate code execution)
- """
- return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
-
- def _strip_leading_slashes(self, relpath):
- """
- Remove leading slash as os.path.join can't cope
- """
- while os.path.isabs(relpath):
- relpath = relpath[1:]
- return relpath
-
- def setUrls(self, urls):
- self.__urls = urls
-
- def getUrls(self):
- return self.__urls
-
- urls = property(getUrls, setUrls, None, "Urls property")
-
- def need_update(self, url, ud, d):
- """
- Force a fetch, even if localpath exists?
- """
- if os.path.exists(ud.localpath):
- return False
- return True
-
- def supports_srcrev(self):
- """
- The fetcher supports auto source revisions (SRCREV)
- """
- return False
-
- def download(self, url, urldata, d):
- """
- Fetch urls
- Assumes localpath was called first
- """
- raise NoMethodError(url)
-
- def unpack(self, urldata, rootdir, data):
- import subprocess
- iterate = False
- file = urldata.localpath
-
- try:
- unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
- except ValueError, exc:
- bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
- (file, urldata.parm.get('unpack')))
-
- dots = file.split(".")
- if dots[-1] in ['gz', 'bz2', 'Z']:
- efile = os.path.join(bb.data.getVar('WORKDIR', data, True),os.path.basename('.'.join(dots[0:-1])))
- else:
- efile = file
- cmd = None
-
- if unpack:
- if file.endswith('.tar'):
- cmd = 'tar x --no-same-owner -f %s' % file
- elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
- cmd = 'tar xz --no-same-owner -f %s' % file
- elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
- cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
- cmd = 'gzip -dc %s > %s' % (file, efile)
- elif file.endswith('.bz2'):
- cmd = 'bzip2 -dc %s > %s' % (file, efile)
- elif file.endswith('.tar.xz'):
- cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.xz'):
- cmd = 'xz -dc %s > %s' % (file, efile)
- elif file.endswith('.zip') or file.endswith('.jar'):
- try:
- dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
- except ValueError, exc:
- bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
- (file, urldata.parm.get('dos')))
- cmd = 'unzip -q -o'
- if dos:
- cmd = '%s -a' % cmd
- cmd = "%s '%s'" % (cmd, file)
- elif file.endswith('.src.rpm') or file.endswith('.srpm'):
- if 'extract' in urldata.parm:
- unpack_file = urldata.parm.get('extract')
- cmd = 'rpm2cpio.sh %s | cpio -i %s' % (file, unpack_file)
- iterate = True
- iterate_file = unpack_file
- else:
- cmd = 'rpm2cpio.sh %s | cpio -i' % (file)
-
- if not unpack or not cmd:
- # If file == dest, then avoid any copies, as we already put the file into dest!
- dest = os.path.join(rootdir, os.path.basename(file))
- if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
- if os.path.isdir(file):
- filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
- destdir = "."
- if file[0:len(filesdir)] == filesdir:
- destdir = file[len(filesdir):file.rfind('/')]
- destdir = destdir.strip('/')
- if len(destdir) < 1:
- destdir = "."
- elif not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
- os.makedirs("%s/%s" % (rootdir, destdir))
- cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
- #cmd = 'tar -cf - -C "%d" -ps . | tar -xf - -C "%s/%s/"' % (file, rootdir, destdir)
- else:
- # The "destdir" handling was specifically done for FILESPATH
- # items. So, only do so for file:// entries.
- if urldata.type == "file" and urldata.path.find("/") != -1:
- destdir = urldata.path.rsplit("/", 1)[0]
- else:
- destdir = "."
- bb.mkdirhier("%s/%s" % (rootdir, destdir))
- cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)
-
- if not cmd:
- return
-
- # Change to subdir before executing command
- save_cwd = os.getcwd()
- os.chdir(rootdir)
- if 'subdir' in urldata.parm:
- newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
- bb.mkdirhier(newdir)
- os.chdir(newdir)
-
- cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
- bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
-
- os.chdir(save_cwd)
-
- if ret != 0:
- raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
-
- if iterate is True:
- iterate_urldata = urldata
- iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
- self.unpack(urldata, rootdir, data)
-
- return
-
- def clean(self, urldata, d):
- """
- Clean any existing full or partial download
- """
- bb.utils.remove(urldata.localpath)
-
- def try_premirror(self, url, urldata, d):
- """
- Should premirrors be used?
- """
- return True
-
- def checkstatus(self, url, urldata, d):
- """
- Check the status of a URL
- Assumes localpath was called first
- """
- logger.info("URL %s could not be checked for status since no method exists.", url)
- return True
-
- def localcount_internal_helper(ud, d, name):
- """
- Return:
- a) a locked localcount if specified
- b) None otherwise
- """
-
- localcount = None
- if name != '':
- pn = data.getVar("PN", d, True)
- localcount = data.getVar("LOCALCOUNT_" + name, d, True)
- if not localcount:
- localcount = data.getVar("LOCALCOUNT", d, True)
- return localcount
-
- localcount_internal_helper = staticmethod(localcount_internal_helper)
-
- def latest_revision(self, url, ud, d, name):
- """
- Look in the cache for the latest revision, if not present ask the SCM.
- """
- if not hasattr(self, "_latest_revision"):
- raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
-
- pd = persist_data.persist(d)
- revs = pd['BB_URI_HEADREVS']
- key = self.generate_revision_key(url, ud, d, name)
- rev = revs[key]
- if rev is not None:
- return str(rev)
-
- revs[key] = rev = self._latest_revision(url, ud, d, name)
- return rev
-
- def sortable_revision(self, url, ud, d, name):
- """
-
- """
- if hasattr(self, "_sortable_revision"):
- return self._sortable_revision(url, ud, d)
-
- pd = persist_data.persist(d)
- localcounts = pd['BB_URI_LOCALCOUNT']
- key = self.generate_revision_key(url, ud, d, name)
-
- latest_rev = self._build_revision(url, ud, d, name)
- last_rev = localcounts[key + '_rev']
- uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
- count = None
- if uselocalcount:
- count = FetchMethod.localcount_internal_helper(ud, d, name)
- if count is None:
- count = localcounts[key + '_count'] or "0"
-
- if last_rev == latest_rev:
- return str(count + "+" + latest_rev)
-
- buildindex_provided = hasattr(self, "_sortable_buildindex")
- if buildindex_provided:
- count = self._sortable_buildindex(url, ud, d, latest_rev)
-
- if count is None:
- count = "0"
- elif uselocalcount or buildindex_provided:
- count = str(count)
- else:
- count = str(int(count) + 1)
-
- localcounts[key + '_rev'] = latest_rev
- localcounts[key + '_count'] = count
-
- return str(count + "+" + latest_rev)
-
- def generate_revision_key(self, url, ud, d, name):
- key = self._revision_key(url, ud, d, name)
- return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
-
-class Fetch(object):
- def __init__(self, urls, d, cache = True):
- if len(urls) == 0:
- urls = d.getVar("SRC_URI", True).split()
- self.urls = urls
- self.d = d
- self.ud = {}
-
- fn = bb.data.getVar('FILE', d, True)
- if cache and fn in urldata_cache:
- self.ud = urldata_cache[fn]
-
- for url in urls:
- if url not in self.ud:
- self.ud[url] = FetchData(url, d)
-
- if cache:
- urldata_cache[fn] = self.ud
-
- def localpath(self, url):
- if url not in self.urls:
- self.ud[url] = FetchData(url, self.d)
-
- self.ud[url].setup_localpath(self.d)
- return bb.data.expand(self.ud[url].localpath, self.d)
-
- def localpaths(self):
- """
- Return a list of the local filenames, assuming successful fetch
- """
- local = []
-
- for u in self.urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- local.append(ud.localpath)
-
- return local
-
- def download(self, urls = []):
- """
- Fetch all urls
- """
- if len(urls) == 0:
- urls = self.urls
-
- network = bb.data.getVar("BB_NO_NETWORK", self.d, True)
- premirroronly = (bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) == "1")
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- m = ud.method
- localpath = ""
-
- if not ud.localfile:
- continue
-
- lf = bb.utils.lockfile(ud.lockfile)
-
- try:
- bb.data.setVar("BB_NO_NETWORK", network, self.d)
-
- if not m.need_update(u, ud, self.d):
- localpath = ud.localpath
- elif m.try_premirror(u, ud, self.d):
- logger.debug(1, "Trying PREMIRRORS")
- mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
- localpath = try_mirrors(self.d, ud, mirrors, False)
-
- if premirroronly:
- bb.data.setVar("BB_NO_NETWORK", "1", self.d)
-
- if not localpath and m.need_update(u, ud, self.d):
- try:
- logger.debug(1, "Trying Upstream")
- m.download(u, ud, self.d)
- if hasattr(m, "build_mirror_data"):
- m.build_mirror_data(u, ud, self.d)
- localpath = ud.localpath
-
- except bb.fetch2.NetworkAccess:
- raise
-
- except BBFetchException as e:
- logger.debug(1, str(e))
- # Remove any incomplete fetch
- if os.path.isfile(ud.localpath):
- bb.utils.remove(ud.localpath)
- logger.debug(1, "Trying MIRRORS")
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
- localpath = try_mirrors(self.d, ud, mirrors)
-
- if not localpath or not os.path.exists(localpath):
- raise FetchError("Unable to fetch URL %s from any source." % u, u)
-
- if os.path.exists(ud.donestamp):
- # Touch the done stamp file to show active use of the download
- try:
- os.utime(ud.donestamp, None)
- except:
- # Errors aren't fatal here
- pass
- else:
- # Only check the checksums if we've not seen this item before, then create the stamp
- verify_checksum(u, ud, self.d)
- open(ud.donestamp, 'w').close()
-
- finally:
- bb.utils.unlockfile(lf)
-
- def checkstatus(self, urls = []):
- """
- Check all urls exist upstream
- """
-
- if len(urls) == 0:
- urls = self.urls
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- m = ud.method
- logger.debug(1, "Testing URL %s", u)
- # First try checking uri, u, from PREMIRRORS
- mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
- ret = try_mirrors(self.d, ud, mirrors, True)
- if not ret:
- # Next try checking from the original uri, u
- try:
- ret = m.checkstatus(u, ud, self.d)
- except:
- # Finally, try checking uri, u, from MIRRORS
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
- ret = try_mirrors(self.d, ud, mirrors, True)
-
- if not ret:
- raise FetchError("URL %s doesn't work" % u, u)
-
- def unpack(self, root, urls = []):
- """
- Check all urls exist upstream
- """
-
- if len(urls) == 0:
- urls = self.urls
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
-
- if bb.data.expand(ud.localpath, self.d) is None:
- continue
-
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
-
- ud.method.unpack(ud, root, self.d)
-
- if ud.lockfile:
- bb.utils.unlockfile(lf)
-
- def clean(self, urls = []):
- """
- Clean files that the fetcher gets or places
- """
-
- if len(urls) == 0:
- urls = self.urls
-
- for url in urls:
- if url not in self.ud:
- self.ud[url] = FetchData(url, self.d)
- ud = self.ud[url]
- ud.setup_localpath(self.d)
-
- if not ud.localfile or ud.localpath is None:
- continue
-
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
-
- ud.method.clean(ud, self.d)
- if ud.donestamp:
- bb.utils.remove(ud.donestamp)
-
- if ud.lockfile:
- bb.utils.unlockfile(lf)
-
-from . import cvs
-from . import git
-from . import local
-from . import svn
-from . import wget
-from . import svk
-from . import ssh
-from . import perforce
-from . import bzr
-from . import hg
-from . import osc
-from . import repo
-
-methods.append(local.Local())
-methods.append(wget.Wget())
-methods.append(svn.Svn())
-methods.append(git.Git())
-methods.append(cvs.Cvs())
-methods.append(svk.Svk())
-methods.append(ssh.SSH())
-methods.append(perforce.Perforce())
-methods.append(bzr.Bzr())
-methods.append(hg.Hg())
-methods.append(osc.Osc())
-methods.append(repo.Repo())
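
The module ends by instantiating every fetcher into the methods list; FetchData.__init__() then walks that list and keeps the first method whose supports() accepts the URL, raising NoMethodError otherwise. A toy sketch of the same dispatch:

    class GitLike:
        def supports(self, url):
            return url.startswith("git://")

    class WgetLike:
        def supports(self, url):
            return url.split("://")[0] in ("http", "https", "ftp")

    methods = [GitLike(), WgetLike()]   # mirrors the registration order above

    def find_method(url):
        for m in methods:
            if m.supports(url):
                return m
        raise RuntimeError("No fetcher supports %s" % url)   # stands in for NoMethodError

    print(find_method("http://example.com/a.tar.gz").__class__.__name__)   # WgetLike
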
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
deleted file mode 100644
index 454961eff0..0000000000
--- a/bitbake/lib/bb/fetch2/bzr.py
+++ /dev/null
@@ -1,141 +0,0 @@
-"""
-BitBake 'Fetch' implementation for bzr.
-
-"""
-
-# Copyright (C) 2007 Ross Burton
-# Copyright (C) 2007 Richard Purdie
-#
-# Classes for obtaining upstream sources for the
-# BitBake build tools.
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Bzr(FetchMethod):
- def supports(self, url, ud, d):
- return ud.type in ['bzr']
-
- def urldata_init(self, ud, d):
- """
- Init bzr-specific variables within the url data
- """
- # Create paths to bzr checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
-
- if not ud.revision:
- ud.revision = self.latest_revision(ud.url, ud, d)
-
- ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildbzrcommand(self, ud, d, command):
- """
- Build up a bzr command line based on ud
- command is "fetch", "update", "revno"
- """
-
- basecmd = data.expand('${FETCHCMD_bzr}', d)
-
- proto = ud.parm.get('proto', 'http')
-
- bzrroot = ud.host + ud.path
-
- options = []
-
- if command is "revno":
- bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
- else:
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- if command is "fetch":
- bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
- elif command is "update":
- bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid bzr command %s" % command, ud.url)
-
- return bzrcmd
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
- bzrcmd = self._buildbzrcommand(ud, d, "update")
- logger.debug(1, "BZR Update %s", loc)
- bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
- runfetchcmd(bzrcmd, d)
- else:
- bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
- bzrcmd = self._buildbzrcommand(ud, d, "fetch")
- bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- logger.debug(1, "BZR Checkout %s", loc)
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", bzrcmd)
- runfetchcmd(bzrcmd, d)
-
- os.chdir(ud.pkgdir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
-
- # tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, url, ud, d, name):
- """
- Return a unique key for the url
- """
- return "bzr:" + ud.pkgdir
-
- def _latest_revision(self, url, ud, d, name):
- """
- Return the latest upstream revision number
- """
- logger.debug(2, "BZR fetcher hitting network for %s", url)
-
- bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
-
- output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
-
- return output.strip()
-
- def _sortable_revision(self, url, ud, d):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return self._build_revision(url, ud, d)
-
- def _build_revision(self, url, ud, d):
- return ud.revision
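
For reference, the command strings _buildbzrcommand assembles can be reproduced in a standalone sketch. The host, path and revision below are hypothetical, and a bare "bzr" stands in for ${FETCHCMD_bzr}:

def build_bzr_command(command, host, path, proto="http", revision=None):
    # Mirrors _buildbzrcommand(): "revno" queries the latest revision,
    # "fetch" checks out, "update" pulls with --overwrite.
    basecmd = "bzr"  # stands in for ${FETCHCMD_bzr}
    bzrroot = host + path
    options = []
    if command == "revno":
        return "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
    if revision:
        options.append("-r %s" % revision)
    if command == "fetch":
        return "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
    if command == "update":
        return "%s pull %s --overwrite" % (basecmd, " ".join(options))
    raise ValueError("Invalid bzr command %s" % command)

# build_bzr_command("fetch", "bzr.example.org", "/trunk/project", revision="42")
# -> 'bzr co -r 42 http://bzr.example.org/trunk/project'
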
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
deleted file mode 100644
index 12d11e0d5b..0000000000
--- a/bitbake/lib/bb/fetch2/cvs.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-#
-
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
-from bb.fetch2 import runfetchcmd
-
-class Cvs(FetchMethod):
- """
- Class to fetch a module or modules from cvs repositories
- """
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with cvs.
- """
- return ud.type in ['cvs']
-
- def urldata_init(self, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("module", ud.url)
- ud.module = ud.parm["module"]
-
- ud.tag = ud.parm.get('tag', "")
-
- # Override the default date in certain cases
- if 'date' in ud.parm:
- ud.date = ud.parm['date']
- elif ud.tag:
- ud.date = ""
-
- norecurse = ''
- if 'norecurse' in ud.parm:
- norecurse = '_norecurse'
-
- fullpath = ''
- if 'fullpath' in ud.parm:
- fullpath = '_fullpath'
-
- ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
-
- def need_update(self, url, ud, d):
- if (ud.date == "now"):
- return True
- if not os.path.exists(ud.localpath):
- return True
- return False
-
- def download(self, loc, ud, d):
-
- method = ud.parm.get('method', 'pserver')
- localdir = ud.parm.get('localdir', ud.module)
- cvs_port = ud.parm.get('port', '')
-
- cvs_rsh = None
- if method == "ext":
- if "rsh" in ud.parm:
- cvs_rsh = ud.parm["rsh"]
-
- if method == "dir":
- cvsroot = ud.path
- else:
- cvsroot = ":" + method
- cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
- if cvsproxyhost:
- cvsroot += ";proxy=" + cvsproxyhost
- cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
- if cvsproxyport:
- cvsroot += ";proxyport=" + cvsproxyport
- cvsroot += ":" + ud.user
- if ud.pswd:
- cvsroot += ":" + ud.pswd
- cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
-
- options = []
- if 'norecurse' in ud.parm:
- options.append("-l")
- if ud.date:
- # treat YYYYMMDDHHMM specially for CVS
- if len(ud.date) == 12:
- options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
- else:
- options.append("-D \"%s UTC\"" % ud.date)
- if ud.tag:
- options.append("-r %s" % ud.tag)
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- data.setVar('CVSROOT', cvsroot, localdata)
- data.setVar('CVSCOOPTS', " ".join(options), localdata)
- data.setVar('CVSMODULE', ud.module, localdata)
- cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
- cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)
-
- if cvs_rsh:
- cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
- cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
- # create module directory
- logger.debug(2, "Fetch: checking for module directory")
- pkg = data.expand('${PN}', d)
- pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
- moddir = os.path.join(pkgdir, localdir)
- if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
- logger.info("Update " + loc)
- bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
- # update sources there
- os.chdir(moddir)
- cmd = cvsupdatecmd
- else:
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(pkgdir)
- os.chdir(pkgdir)
- logger.debug(1, "Running %s", cvscmd)
- bb.fetch2.check_network_access(d, cvscmd, ud.url)
- cmd = cvscmd
-
- runfetchcmd(cmd, d, cleanup = [moddir])
-
- if not os.access(moddir, os.R_OK):
- raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude 'CVS'"
-
- # tar them up to a defined filename
- if 'fullpath' in ud.parm:
- os.chdir(pkgdir)
- cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
- else:
- os.chdir(moddir)
- os.chdir('..')
- cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
-
- runfetchcmd(cmd, d, cleanup = [ud.localpath])
-
- def clean(self, ud, d):
- """ Clean CVS Files and tarballs """
-
- pkg = data.expand('${PN}', d)
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
- pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
-
- bb.utils.remove(pkgdir, True)
- bb.utils.remove(ud.localpath)
-
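
The CVSROOT assembly in Cvs.download() above concatenates method, optional proxy settings, credentials, host, port and path. A minimal sketch of the same string building, with made-up values:

def build_cvsroot(method, user, host, path, pswd="", port="",
                  proxyhost=None, proxyport=None):
    # Mirrors the cvsroot construction in Cvs.download().
    if method == "dir":
        return path  # local repository: CVSROOT is just the path
    root = ":" + method
    if proxyhost:
        root += ";proxy=" + proxyhost
        if proxyport:
            root += ";proxyport=" + proxyport
    root += ":" + user
    if pswd:
        root += ":" + pswd
    return root + "@" + host + ":" + port + path

# build_cvsroot("pserver", "anonymous", "cvs.example.org", "/cvsroot/project")
# -> ':pserver:anonymous@cvs.example.org:/cvsroot/project'
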
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
deleted file mode 100644
index f2c27e42a7..0000000000
--- a/bitbake/lib/bb/fetch2/git.py
+++ /dev/null
@@ -1,242 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' git implementation
-
-"""
-
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Git(FetchMethod):
- """Class to fetch a module or modules from git repositories"""
- def init(self, d):
- #
- # Only enable _sortable revision if the key is set
- #
- if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
- self._sortable_buildindex = self._sortable_buildindex_disabled
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with git.
- """
- return ud.type in ['git']
-
- def urldata_init(self, ud, d):
- """
- init git-specific variables within the url data
- so that methods like latest_revision() can work
- """
- if 'protocol' in ud.parm:
- ud.proto = ud.parm['protocol']
- elif not ud.host:
- ud.proto = 'file'
- else:
- ud.proto = "rsync"
-
- ud.nocheckout = False
- if 'nocheckout' in ud.parm:
- ud.nocheckout = True
-
- branches = ud.parm.get("branch", "master").split(',')
- if len(branches) != len(ud.names):
- raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
- ud.branches = {}
- for name in ud.names:
- branch = branches[ud.names.index(name)]
- ud.branches[name] = branch
-
- gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
- ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
- ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
- ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
-
- ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
-
- for name in ud.names:
- # Ensure anything that doesn't look like a SHA-1 checksum/revision is translated into one
- if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
- ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)
-
- ud.write_tarballs = (data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0"
-
- ud.localfile = ud.clonedir
-
- def localpath(self, url, ud, d):
- return ud.clonedir
-
- def need_update(self, u, ud, d):
- if not os.path.exists(ud.clonedir):
- return True
- os.chdir(ud.clonedir)
- for name in ud.names:
- if not self._contains_ref(ud.revisions[name], d):
- return True
- if ud.write_tarballs and not os.path.exists(ud.fullmirror):
- return True
- return False
-
- def try_premirror(self, u, ud, d):
- # If we don't do this, updating an existing checkout with only premirrors
- # is not possible
- if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
- return True
- if os.path.exists(ud.clonedir):
- return False
- return True
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
-
- ud.repochanged = not os.path.exists(ud.fullmirror)
-
- # If the checkout doesn't exist and the mirror tarball does, extract it
- if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
- bb.mkdirhier(ud.clonedir)
- os.chdir(ud.clonedir)
- runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
-
- # If the repo still doesn't exist, fallback to cloning it
- if not os.path.exists(ud.clonedir):
- bb.fetch2.check_network_access(d, "git clone --bare %s%s" % (ud.host, ud.path))
- runfetchcmd("%s clone --bare %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
-
- os.chdir(ud.clonedir)
- # Update the checkout if needed
- needupdate = False
- for name in ud.names:
- if not self._contains_ref(ud.revisions[name], d):
- needupdate = True
- if needupdate:
- bb.fetch2.check_network_access(d, "git fetch %s%s" % (ud.host, ud.path), ud.url)
- try:
- runfetchcmd("%s remote prune origin" % ud.basecmd, d)
- runfetchcmd("%s remote rm origin" % ud.basecmd, d)
- except bb.fetch2.FetchError:
- logger.debug(1, "No Origin")
-
- runfetchcmd("%s remote add origin %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
- runfetchcmd("%s fetch --all -t" % ud.basecmd, d)
- runfetchcmd("%s prune-packed" % ud.basecmd, d)
- runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
- ud.repochanged = True
-
- def build_mirror_data(self, url, ud, d):
- # Generate a mirror tarball if needed
- if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
- os.chdir(ud.clonedir)
- logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
-
- def unpack(self, ud, destdir, d):
- """ unpack the downloaded src to destdir"""
-
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- readpathspec = ":%s" % (subdir)
- else:
- readpathspec = ""
-
- destdir = os.path.join(destdir, "git/")
- if os.path.exists(destdir):
- bb.utils.prunedir(destdir)
-
- runfetchcmd("git clone -s -n %s %s" % (ud.clonedir, destdir), d)
- if not ud.nocheckout:
- os.chdir(destdir)
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
- runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
- return True
-
- def clean(self, ud, d):
- """ clean the git directory """
-
- bb.utils.remove(ud.localpath, True)
- bb.utils.remove(ud.fullmirror)
-
- def supports_srcrev(self):
- return True
-
- def _contains_ref(self, tag, d):
- basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
- return output.split()[0] != "0"
-
- def _revision_key(self, url, ud, d, name):
- """
- Return a unique key for the url
- """
- return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]
-
- def _latest_revision(self, url, ud, d, name):
- """
- Compute the HEAD revision for the url
- """
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
-
- bb.fetch2.check_network_access(d, "git ls-remote %s%s %s" % (ud.host, ud.path, ud.branches[name]))
- basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
- output = runfetchcmd(cmd, d, True)
- if not output:
- raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)
- return output.split()[0]
-
- def _build_revision(self, url, ud, d, name):
- return ud.revisions[name]
-
- def _sortable_buildindex_disabled(self, url, ud, d, rev):
- """
- Return a suitable buildindex for the revision specified. This is done by counting revisions
- using "git rev-list" which may or may not work in different circumstances.
- """
-
- cwd = os.getcwd()
-
- # Check if we have the rev already
-
- if not os.path.exists(ud.clonedir):
- print("no repo")
- self.download(None, ud, d)
- if not os.path.exists(ud.clonedir):
- logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
- return None
-
-
- os.chdir(ud.clonedir)
- if not self._contains_ref(rev, d):
- self.download(None, ud, d)
-
- output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
- os.chdir(cwd)
-
- buildindex = "%s" % output.split()[0]
- logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
- return buildindex
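
_latest_revision above resolves a branch name to a commit hash with git ls-remote and keeps the first field of the output. The same lookup as a self-contained sketch (the repository URL and branch are hypothetical):

import subprocess

def latest_revision(repo_url, branch):
    # Mirrors Git._latest_revision(): ls-remote prints "<sha1>\t<ref>"
    # per match; the revision is the first whitespace-delimited field.
    output = subprocess.check_output(["git", "ls-remote", repo_url, branch])
    output = output.decode("utf-8")
    if not output:
        raise RuntimeError("git ls-remote gave empty output for %s" % repo_url)
    return output.split()[0]

# latest_revision("git://git.example.org/project.git", "master")
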
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
deleted file mode 100644
index 6a56f8d0cd..0000000000
--- a/bitbake/lib/bb/fetch2/hg.py
+++ /dev/null
@@ -1,174 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for mercurial DRCS (hg).
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-# Copyright (C) 2007 Robert Schuster
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Hg(FetchMethod):
- """Class to fetch from mercurial repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with mercurial.
- """
- return ud.type in ['hg']
-
- def urldata_init(self, ud, d):
- """
- init hg-specific variables within the url data
- """
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.module = ud.parm["module"]
-
- # Create paths to mercurial checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- elif not ud.revision:
- ud.revision = self.latest_revision(ud.url, ud, d)
-
- ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- def need_update(self, url, ud, d):
- revTag = ud.parm.get('rev', 'tip')
- if revTag == "tip":
- return True
- if not os.path.exists(ud.localpath):
- return True
- return False
-
- def _buildhgcommand(self, ud, d, command):
- """
- Build up an hg commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_hg}', d)
-
- proto = ud.parm.get('proto', 'http')
-
- host = ud.host
- if proto == "file":
- host = "/"
- ud.host = "localhost"
-
- if not ud.user:
- hgroot = host + ud.path
- else:
- hgroot = ud.user + "@" + host + ud.path
-
- if command is "info":
- return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
-
- options = []
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- if command is "fetch":
- cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
- elif command is "pull":
- # do not pass options list; limiting pull to rev causes the local
- # repo not to contain it and immediately following "update" command
- # will crash
- cmd = "%s pull" % (basecmd)
- elif command is "update":
- cmd = "%s update -C %s" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid hg command %s" % command, ud.url)
-
- return cmd
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
- updatecmd = self._buildhgcommand(ud, d, "pull")
- logger.info("Update " + loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- bb.fetch2.check_network_access(d, updatecmd, ud.url)
- runfetchcmd(updatecmd, d)
-
- else:
- fetchcmd = self._buildhgcommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", fetchcmd)
- bb.fetch2.check_network_access(d, fetchcmd, ud.url)
- runfetchcmd(fetchcmd, d)
-
- # Even when we clone (fetch), we still need to update as hg's clone
- # won't check out the specified revision if it's on a branch
- updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.hg' --exclude '.hgrags'"
-
- os.chdir(ud.pkgdir)
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
-
- def supports_srcrev(self):
- return True
-
- def _latest_revision(self, url, ud, d, name):
- """
- Compute tip revision for the url
- """
- bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
- output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
- return output.strip()
-
- def _build_revision(self, url, ud, d):
- return ud.revision
-
- def _revision_key(self, url, ud, d, name):
- """
- Return a unique key for the url
- """
- return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
deleted file mode 100644
index 77a296ec67..0000000000
--- a/bitbake/lib/bb/fetch2/local.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import bb
-import bb.utils
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import logger
-
-class Local(FetchMethod):
- def supports(self, url, urldata, d):
- """
- Check to see if a given url represents a local fetch.
- """
- return urldata.type in ['file']
-
- def urldata_init(self, ud, d):
- # We don't set localfile as for this fetcher the file is already local!
- return
-
- def localpath(self, url, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- """
- path = url.split("://")[1]
- path = path.split(";")[0]
- newpath = path
- if path[0] != "/":
- filespath = data.getVar('FILESPATH', d, True)
- if filespath:
- newpath = bb.utils.which(filespath, path)
- if not newpath:
- filesdir = data.getVar('FILESDIR', d, True)
- if filesdir:
- newpath = os.path.join(filesdir, path)
- return newpath
-
- def download(self, url, urldata, d):
- """Fetch urls (no-op for Local method)"""
- # no need to fetch local files, we'll deal with them in place.
- return 1
-
- def checkstatus(self, url, urldata, d):
- """
- Check the status of the url
- """
- if urldata.localpath.find("*") != -1:
- logger.info("URL %s looks like a glob and was therefore not checked.", url)
- return True
- if os.path.exists(urldata.localpath):
- return True
- return False
-
- def clean(self, urldata, d):
- return
-
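
Local.localpath() above resolves a relative file:// path by walking the colon-separated FILESPATH list, much like a shell PATH lookup. A minimal sketch of that search (the directories are hypothetical):

import os

def which(search_path, filename):
    # Simplified stand-in for bb.utils.which() as used in localpath().
    for directory in search_path.split(":"):
        candidate = os.path.join(directory, filename)
        if os.path.exists(candidate):
            return candidate
    return ""

# which("/layer/recipes/files:/layer/files", "defconfig")
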
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
deleted file mode 100644
index 4bf411c24f..0000000000
--- a/bitbake/lib/bb/fetch2/osc.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-Bitbake "Fetch" implementation for osc (Opensuse build service client).
-Based on the svn "Fetch" implementation.
-
-"""
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Osc(FetchMethod):
- """Class to fetch a module or modules from Opensuse build server
- repositories."""
-
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with osc.
- """
- return ud.type in ['osc']
-
- def urldata_init(self, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.module = ud.parm["module"]
-
- # Create paths to osc checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
- ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- else:
- pv = data.getVar("PV", d, 0)
- rev = bb.fetch2.srcrev_internal_helper(ud, d)
- if rev and rev != True:
- ud.revision = rev
- else:
- ud.revision = ""
-
- ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildosccommand(self, ud, d, command):
- """
- Build up an osc command line based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_osc}', d)
-
- proto = ud.parm.get('proto', 'osc')
-
- options = []
-
- config = "-c %s" % self.generate_config(ud, d)
-
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- coroot = self._strip_leading_slashes(ud.path)
-
- if command is "fetch":
- osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
- elif command is "update":
- osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
- else:
- raise FetchError("Invalid osc command %s" % command, ud.url)
-
- return osccmd
-
- def download(self, loc, ud, d):
- """
- Fetch url
- """
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(ud.moddir, os.R_OK):
- oscupdatecmd = self._buildosccommand(ud, d, "update")
- logger.info("Update "+ loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", oscupdatecmd)
- bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
- runfetchcmd(oscupdatecmd, d)
- else:
- oscfetchcmd = self._buildosccommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", oscfetchcmd)
- bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
- runfetchcmd(oscfetchcmd, d)
-
- os.chdir(ud.pkgdir + ud.path)
- # tar them up to a defined filename
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
-
- def supports_srcrev(self):
- return False
-
- def generate_config(self, ud, d):
- """
- Generate a .oscrc to be used for this run.
- """
-
- config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
- if os.path.exists(config_path):
- os.remove(config_path)
-
- f = open(config_path, 'w')
- f.write("[general]\n")
- f.write("apisrv = %s\n" % ud.host)
- f.write("scheme = http\n")
- f.write("su-wrapper = su -c\n")
- f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
- f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
- f.write("extra-pkgs = gzip\n")
- f.write("\n")
- f.write("[%s]\n" % ud.host)
- f.write("user = %s\n" % ud.parm["user"])
- f.write("pass = %s\n" % ud.parm["pswd"])
- f.close()
-
- return config_path
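
With hypothetical host and credentials, the .oscrc that generate_config() writes comes out roughly as follows (obs.example.org, the user/pass and the build-root are made up; the urllist line is verbatim from the code above):

[general]
apisrv = obs.example.org
scheme = http
su-wrapper = su -c
build-root = /path/to/workdir
urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm
extra-pkgs = gzip

[obs.example.org]
user = exampleuser
pass = examplepass
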
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
deleted file mode 100644
index 6347834c76..0000000000
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from future_builtins import zip
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-class Perforce(FetchMethod):
- def supports(self, url, ud, d):
- return ud.type in ['p4']
-
- def doparse(url, d):
- parm = {}
- path = url.split("://")[1]
- delim = path.find("@");
- if delim != -1:
- (user, pswd, host, port) = path.split('@')[0].split(":")
- path = path.split('@')[1]
- else:
- (host, port) = data.getVar('P4PORT', d).split(':')
- user = ""
- pswd = ""
-
- if path.find(";") != -1:
- keys=[]
- values=[]
- plist = path.split(';')
- for item in plist:
- if item.count('='):
- (key, value) = item.split('=')
- keys.append(key)
- values.append(value)
-
- parm = dict(zip(keys, values))
- path = "//" + path.split(';')[0]
- host += ":%s" % (port)
- parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
-
- return host, path, user, pswd, parm
- doparse = staticmethod(doparse)
-
- def getcset(d, depot, host, user, pswd, parm):
- p4opt = ""
- if "cset" in parm:
- return parm["cset"];
- if user:
- p4opt += " -u %s" % (user)
- if pswd:
- p4opt += " -P %s" % (pswd)
- if host:
- p4opt += " -p %s" % (host)
-
- p4date = data.getVar("P4DATE", d, True)
- if "revision" in parm:
- depot += "#%s" % (parm["revision"])
- elif "label" in parm:
- depot += "@%s" % (parm["label"])
- elif p4date:
- depot += "@%s" % (p4date)
-
- p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
- logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
- p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
- cset = p4file.readline().strip()
- logger.debug(1, "READ %s", cset)
- if not cset:
- return -1
-
- return cset.split(' ')[1]
- getcset = staticmethod(getcset)
-
- def urldata_init(self, ud, d):
- (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
-
- # If a label is specified, we use that as our filename
-
- if "label" in parm:
- ud.localfile = "%s.tar.gz" % (parm["label"])
- return
-
- base = path
- which = path.find('/...')
- if which != -1:
- base = path[:which]
-
- base = self._strip_leading_slashes(base)
-
- cset = Perforce.getcset(d, path, host, user, pswd, parm)
-
- ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
-
- def download(self, loc, ud, d):
- """
- Fetch urls
- """
-
- (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
-
- if depot.find('/...') != -1:
- path = depot[:depot.find('/...')]
- else:
- path = depot
-
- module = parm.get('module', os.path.basename(path))
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- # Get the p4 command
- p4opt = ""
- if user:
- p4opt += " -u %s" % (user)
-
- if pswd:
- p4opt += " -P %s" % (pswd)
-
- if host:
- p4opt += " -p %s" % (host)
-
- p4cmd = data.getVar('FETCHCOMMAND', localdata, True)
-
- # create temp directory
- logger.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
-
- if "label" in parm:
- depot = "%s@%s" % (depot, parm["label"])
- else:
- cset = Perforce.getcset(d, depot, host, user, pswd, parm)
- depot = "%s@%s" % (depot, cset)
-
- os.chdir(tmpfile)
- logger.info("Fetch " + loc)
- logger.info("%s%s files %s", p4cmd, p4opt, depot)
- p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
-
- if not p4file:
- raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)
-
- count = 0
-
- for file in p4file:
- list = file.split()
-
- if list[2] == "delete":
- continue
-
- dest = list[0][len(path)+1:]
- where = dest.find("#")
-
- os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
- count = count + 1
-
- if count == 0:
- logger.error("Fetch: no files gathered from the P4 fetch")
- raise FetchError("Fetch: No files gathered from the P4 fetch", loc)
-
- runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
- # cleanup
- bb.utils.prunedir(tmpfile)
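
Perforce.doparse() above splits a p4:// URL into credentials, host:port, the depot path and its ;key=value parameters. A standalone sketch of the same decomposition (minus the changeset lookup, and normalizing the depot path unconditionally) on a hypothetical URL:

def parse_p4_url(url, default_p4port="perforce.example.org:1666"):
    # Mirrors Perforce.doparse(); default_p4port stands in for P4PORT.
    path = url.split("://")[1]
    user = pswd = ""
    if "@" in path:
        creds, path = path.split("@", 1)
        (user, pswd, host, port) = creds.split(":")
    else:
        (host, port) = default_p4port.split(":")
    parm = {}
    if ";" in path:
        for item in path.split(";")[1:]:
            if "=" in item:
                key, value = item.split("=", 1)
                parm[key] = value
        path = path.split(";")[0]
    return host + ":" + port, "//" + path, user, pswd, parm

# parse_p4_url("p4://user:pass:p4.example.org:1666@depot/project;label=rel_1")
# -> ('p4.example.org:1666', '//depot/project', 'user', 'pass', {'label': 'rel_1'})
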
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
deleted file mode 100644
index 54130a8c3b..0000000000
--- a/bitbake/lib/bb/fetch2/repo.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake "Fetch" repo (git) implementation
-
-"""
-
-# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
-#
-# Based on git.py which is:
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Repo(FetchMethod):
- """Class to fetch a module or modules from repo (git) repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with repo.
- """
- return ud.type in ["repo"]
-
- def urldata_init(self, ud, d):
- """
- We don"t care about the git rev of the manifests repository, but
- we do care about the manifest to use. The default is "default".
- We also care about the branch or tag to be used. The default is
- "master".
- """
-
- ud.proto = ud.parm.get('protocol', 'git')
- ud.branch = ud.parm.get('branch', 'master')
- ud.manifest = ud.parm.get('manifest', 'default.xml')
- if not ud.manifest.endswith('.xml'):
- ud.manifest += '.xml'
-
- ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
- logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
- return
-
- gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
- repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
- codir = os.path.join(repodir, gitsrcname, ud.manifest)
-
- if ud.user:
- username = ud.user + "@"
- else:
- username = ""
-
- bb.mkdirhier(os.path.join(codir, "repo"))
- os.chdir(os.path.join(codir, "repo"))
- if not os.path.exists(os.path.join(codir, "repo", ".repo")):
- bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
- runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
-
- bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
- runfetchcmd("repo sync", d)
- os.chdir(codir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.repo' --exclude '.git'"
-
- # Create a cache
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
-
- def supports_srcrev(self):
- return False
-
- def _build_revision(self, url, ud, d):
- return ud.manifest
-
- def _want_sortable_revision(self, url, ud, d):
- return False
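
For a hypothetical URL repo://android.example.org/platform/manifest;protocol=https;branch=froyo, urldata_init and download above work out to:

manifest  = default.xml
localfile = repo_android.example.org.platform.manifest_default.xml_froyo.tar.gz

repo init -m default.xml -b froyo -u https://android.example.org/platform/manifest
repo sync
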
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
deleted file mode 100644
index 91ac15faae..0000000000
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-'''
-BitBake 'Fetch' implementations
-
-This implementation is for Secure Shell (SSH), and attempts to comply with the
-IETF secsh internet draft:
- http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
-
- Currently does not support the sftp parameters, as this uses scp
- Also does not support the 'fingerprint' connection parameter.
-
-'''
-
-# Copyright (C) 2006 OpenedHand Ltd.
-#
-#
-# Based in part on svk.py:
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Based on svn.py:
-# Copyright (C) 2003, 2004 Chris Larson
-# Based on functions from the base bb module:
-# Copyright 2003 Holger Schurig
-#
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, os
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-
-__pattern__ = re.compile(r'''
- \s* # Skip leading whitespace
- ssh:// # scheme
- ( # Optional username/password block
- (?P<user>\S+) # username
- (:(?P<pass>\S+))? # colon followed by the password (optional)
- )?
- (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
- @
- (?P<host>\S+?) # non-greedy match of the host
- (:(?P<port>[0-9]+))? # colon followed by the port (optional)
- /
- (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
- # and may include the use of '~' to reference the remote home
- # directory
- (?P<sparam>(;[^;]+)*)? # parameters block (optional)
- $
-''', re.VERBOSE)
-
-class SSH(FetchMethod):
- '''Class to fetch a module or modules via Secure Shell'''
-
- def supports(self, url, urldata, d):
- return __pattern__.match(url) is not None
-
- def localpath(self, url, urldata, d):
- m = __pattern__.match(urldata.url)
- path = m.group('path')
- host = m.group('host')
- lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
- return lpath
-
- def download(self, url, urldata, d):
- dldir = data.getVar('DL_DIR', d, True)
-
- m = __pattern__.match(url)
- path = m.group('path')
- host = m.group('host')
- port = m.group('port')
- user = m.group('user')
- password = m.group('pass')
-
- ldir = os.path.join(dldir, host)
- lpath = os.path.join(ldir, os.path.basename(path))
-
- if not os.path.exists(ldir):
- os.makedirs(ldir)
-
- if port:
- port = '-P %s' % port
- else:
- port = ''
-
- if user:
- fr = user
- if password:
- fr += ':%s' % password
- fr += '@%s' % host
- else:
- fr = host
- fr += ':%s' % path
-
-
- import commands
- cmd = 'scp -B -r %s %s %s/' % (
- port,
- commands.mkarg(fr),
- commands.mkarg(ldir)
- )
-
- bb.fetch2.check_network_access(d, cmd, urldata.url)
-
- runfetchcmd(cmd, d)
-
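
A quick demonstration of the URL pattern above on a hypothetical ssh:// URL; the regex is repeated verbatim so the snippet runs on its own:

import re

pattern = re.compile(r'''
    \s*                     # Skip leading whitespace
    ssh://                  # scheme
    (                       # Optional username/password block
        (?P<user>\S+)       # username
        (:(?P<pass>\S+))?   # colon followed by the password (optional)
    )?
    (?P<cparam>(;[^;]+)*)?  # connection parameters block (optional)
    @
    (?P<host>\S+?)          # non-greedy match of the host
    (:(?P<port>[0-9]+))?    # colon followed by the port (optional)
    /
    (?P<path>[^;]+)         # path on the remote system
    (?P<sparam>(;[^;]+)*)?  # parameters block (optional)
    $
''', re.VERBOSE)

m = pattern.match("ssh://builder@build.example.org:2222/srv/downloads/file.tar.gz")
# m.group('user') == 'builder', m.group('host') == 'build.example.org',
# m.group('port') == '2222', m.group('path') == 'srv/downloads/file.tar.gz'
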
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py
deleted file mode 100644
index 6211cac8d3..0000000000
--- a/bitbake/lib/bb/fetch2/svk.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-This implementation is for svk. It is based on the svn implementation
-
-"""
-
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-class Svk(FetchMethod):
- """Class to fetch a module or modules from svk repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with svk.
- """
- return ud.type in ['svk']
-
- def urldata_init(self, ud, d):
-
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
- else:
- ud.module = ud.parm["module"]
-
- ud.revision = ud.parm.get('rev', "")
-
- ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
-
- def need_update(self, url, ud, d):
- if ud.date == "now":
- return True
- if not os.path.exists(ud.localpath):
- return True
- return False
-
- def download(self, loc, ud, d):
- """Fetch urls"""
-
- svkroot = ud.host + ud.path
-
- svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
-
- if ud.revision:
- svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
-
- # create temp directory
- localdata = data.createCopy(d)
- data.update_data(localdata)
- logger.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- logger.error("Fetch: unable to create a temporary directory")
- raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
-
- # check out sources there
- os.chdir(tmpfile)
- logger.info("Fetch " + loc)
- logger.debug(1, "Running %s", svkcmd)
- runfetchcmd(svkcmd, d, cleanup = [tmpfile])
-
- os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
- # tar them up to a defined filename
- runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])
-
- # cleanup
- bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
deleted file mode 100644
index ac4fd27e14..0000000000
--- a/bitbake/lib/bb/fetch2/svn.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for svn.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Svn(FetchMethod):
- """Class to fetch a module or modules from svn repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with svn.
- """
- return ud.type in ['svn']
-
- def urldata_init(self, ud, d):
- """
- init svn-specific variables within the url data
- """
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.module = ud.parm["module"]
-
- # Create paths to svn checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
-
- ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildsvncommand(self, ud, d, command):
- """
- Build up an svn commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_svn}', d)
-
- proto = ud.parm.get('proto', 'svn')
-
- svn_rsh = None
- if proto == "svn+ssh" and "rsh" in ud.parm:
- svn_rsh = ud.parm["rsh"]
-
- svnroot = ud.host + ud.path
-
- options = []
-
- if ud.user:
- options.append("--username %s" % ud.user)
-
- if ud.pswd:
- options.append("--password %s" % ud.pswd)
-
- if command is "info":
- svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
- else:
- suffix = ""
- if ud.revision:
- options.append("-r %s" % ud.revision)
- suffix = "@%s" % (ud.revision)
-
- if command is "fetch":
- svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
- elif command is "update":
- svncmd = "%s update %s" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid svn command %s" % command, ud.url)
-
- if svn_rsh:
- svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
- return svncmd
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
- svnupdatecmd = self._buildsvncommand(ud, d, "update")
- logger.info("Update " + loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", svnupdatecmd)
- bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
- runfetchcmd(svnupdatecmd, d)
- else:
- svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", svnfetchcmd)
- bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
- runfetchcmd(svnfetchcmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.svn'"
-
- os.chdir(ud.pkgdir)
- # tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
-
- def clean(self, ud, d):
- """ Clean SVN specific files and dirs """
-
- bb.utils.remove(ud.localpath)
- bb.utils.remove(ud.moddir, True)
-
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, url, ud, d, name):
- """
- Return a unique key for the url
- """
- return "svn:" + ud.moddir
-
- def _latest_revision(self, url, ud, d, name):
- """
- Return the latest upstream revision number
- """
- bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "info"))
-
- output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
-
- revision = None
- for line in output.splitlines():
- if "Last Changed Rev" in line:
- revision = line.split(":")[1].strip()
-
- return revision
-
- def _sortable_revision(self, url, ud, d):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return self._build_revision(url, ud, d)
-
- def _build_revision(self, url, ud, d):
- return ud.revision
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
deleted file mode 100644
index 7bd027adc5..0000000000
--- a/bitbake/lib/bb/fetch2/wget.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import logging
-import bb
-import urllib
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import encodeurl
-from bb.fetch2 import decodeurl
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-class Wget(FetchMethod):
- """Class to fetch urls via 'wget'"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with wget.
- """
- return ud.type in ['http', 'https', 'ftp']
-
- def urldata_init(self, ud, d):
-
- ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
-
- def download(self, uri, ud, d, checkonly = False):
- """Fetch urls"""
-
- def fetch_uri(uri, ud, d):
- if checkonly:
- fetchcmd = data.getVar("CHECKCOMMAND", d, True)
- elif os.path.exists(ud.localpath):
- # file exists, but we didn't complete it; try again
- fetchcmd = data.getVar("RESUMECOMMAND", d, True)
- else:
- fetchcmd = data.getVar("FETCHCOMMAND", d, True)
-
- uri = uri.split(";")[0]
- uri_decoded = list(decodeurl(uri))
- uri_type = uri_decoded[0]
- uri_host = uri_decoded[1]
-
- fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
- fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
- logger.info("fetch " + uri)
- logger.debug(2, "executing " + fetchcmd)
- bb.fetch2.check_network_access(d, fetchcmd)
- runfetchcmd(fetchcmd, d)
-
- # Sanity check since wget can pretend it succeeded when it didn't
- # Also, this used to happen if sourceforge sent us to the mirror page
- if not os.path.exists(ud.localpath) and not checkonly:
- raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- fetch_uri(uri, ud, localdata)
-
- return True
-
- def checkstatus(self, uri, ud, d):
- return self.download(uri, ud, d, True)
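
fetch_uri above picks one of three command templates (check, resume, fetch) and substitutes ${URI} and ${FILE}. With a hypothetical FETCHCOMMAND value the substitution looks like:

# Hypothetical FETCHCOMMAND template; the real value comes from
# BitBake configuration.
fetchcmd = "wget -t 2 -T 30 --passive-ftp -O ${FILE} ${URI}"
uri = "http://downloads.example.org/project-1.0.tar.gz"
basename = "project-1.0.tar.gz"

fetchcmd = fetchcmd.replace("${URI}", uri).replace("${FILE}", basename)
print(fetchcmd)
# wget -t 2 -T 30 --passive-ftp -O project-1.0.tar.gz http://downloads.example.org/project-1.0.tar.gz
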
diff --git a/bitbake/lib/bb/methodpool.py b/bitbake/lib/bb/methodpool.py
deleted file mode 100644
index 1485b1357d..0000000000
--- a/bitbake/lib/bb/methodpool.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-#
-# Copyright (C) 2006 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-"""
- What is a method pool?
-
- BitBake has a global method scope where .bb, .inc and .bbclass
- files can install methods. These methods are parsed from strings.
- To avoid recompiling and executing these strings we introduce
- a method pool to do this task.
-
- This pool will be used to compile and execute the functions. It
- will be smart enough to compile a given module only once.
-"""
-
-from bb.utils import better_compile, better_exec
-from bb import error
-
-# A dict of modules we have handled;
-# its size grows with the number of .bbclass (and other) files parsed
-_parsed_methods = { }
-_parsed_fns = { }
-
-def insert_method(modulename, code, fn):
- """
- Add the code of a module. The methods
- will simply be added; no checking will be done
- """
- comp = better_compile(code, modulename, fn)
- better_exec(comp, None, code, fn)
-
- # now some instrumentation
- code = comp.co_names
- for name in code:
- if name in ['None', 'False']:
- continue
- elif name in _parsed_fns and not _parsed_fns[name] == modulename:
- error( "Error Method already seen: %s in' %s' now in '%s'" % (name, _parsed_fns[name], modulename))
- else:
- _parsed_fns[name] = modulename
-
-def check_insert_method(modulename, code, fn):
- """
- Add the code if it wasn't added before. The module
- name is used as the cache key.
-
- Variables:
- @modulename a short name e.g. base.bbclass
- @code The actual python code
- @fn The filename from the outer file
- """
- if modulename not in _parsed_methods:
- insert_method(modulename, code, fn)
- _parsed_methods[modulename] = 1
-
-def parsed_module(modulename):
- """
- Inform me file xyz was parsed
- """
- return modulename in _parsed_methods
-
-
-def get_parsed_dict():
- """
- shortcut
- """
- return _parsed_methods
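
The pool boils down to compile-once caching keyed on module name, plus bookkeeping of which module defined which names. A self-contained sketch of the same idea, using plain compile()/exec() in place of BitBake's better_compile/better_exec:

_parsed_methods = {}
_parsed_fns = {}

def check_insert(modulename, code):
    # Compile and execute each module's code only once
    # (cf. check_insert_method/insert_method above).
    if modulename in _parsed_methods:
        return
    comp = compile(code, modulename, "exec")
    exec(comp, {})
    for name in comp.co_names:
        prev = _parsed_fns.get(name)
        if prev is not None and prev != modulename:
            print("method already seen: %s in '%s', now in '%s'"
                  % (name, prev, modulename))
        _parsed_fns[name] = modulename
    _parsed_methods[modulename] = 1

check_insert("base.bbclass", "def do_fetch():\n    pass\n")
check_insert("base.bbclass", "never compiled - module already cached")
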
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
deleted file mode 100644
index 1f9ff904af..0000000000
--- a/bitbake/lib/bb/msg.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'msg' implementation
-
-Message handling infrastructure for bitbake
-
-"""
-
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import sys
-import logging
-import collections
-from itertools import groupby
-import warnings
-import bb
-import bb.event
-
-class BBLogFormatter(logging.Formatter):
- """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is"""
-
- DEBUG3 = logging.DEBUG - 2
- DEBUG2 = logging.DEBUG - 1
- DEBUG = logging.DEBUG
- VERBOSE = logging.INFO - 1
- NOTE = logging.INFO
- PLAIN = logging.INFO + 1
- ERROR = logging.ERROR
- WARNING = logging.WARNING
- CRITICAL = logging.CRITICAL
-
- levelnames = {
- DEBUG3 : 'DEBUG',
- DEBUG2 : 'DEBUG',
- DEBUG : 'DEBUG',
- VERBOSE: 'NOTE',
- NOTE : 'NOTE',
- PLAIN : '',
- WARNING : 'WARNING',
- ERROR : 'ERROR',
- CRITICAL: 'ERROR',
- }
-
- def getLevelName(self, levelno):
- try:
- return self.levelnames[levelno]
- except KeyError:
- self.levelnames[levelno] = value = 'Level %d' % levelno
- return value
-
- def format(self, record):
- record.levelname = self.getLevelName(record.levelno)
- if record.levelno == self.PLAIN:
- return record.getMessage()
- else:
- return logging.Formatter.format(self, record)
-
-class Loggers(dict):
- def __getitem__(self, key):
- if key in self:
- return dict.__getitem__(self, key)
- else:
- log = logging.getLogger("BitBake.%s" % domain._fields[key])
- dict.__setitem__(self, key, log)
- return log
-
-class DebugLevel(dict):
- def __getitem__(self, key):
- if key == "default":
- key = domain.Default
- return get_debug_level(key)
-
-def _NamedTuple(name, fields):
- Tuple = collections.namedtuple(name, " ".join(fields))
- return Tuple(*range(len(fields)))
-
-domain = _NamedTuple("Domain", (
- "Default",
- "Build",
- "Cache",
- "Collection",
- "Data",
- "Depends",
- "Fetcher",
- "Parsing",
- "PersistData",
- "Provider",
- "RunQueue",
- "TaskData",
- "Util"))
-logger = logging.getLogger("BitBake")
-loggers = Loggers()
-debug_level = DebugLevel()
-
-# Message control functions
-#
-
-def set_debug_level(level):
- for log in loggers.itervalues():
- log.setLevel(logging.NOTSET)
-
- if level:
- logger.setLevel(logging.DEBUG - level + 1)
- else:
- logger.setLevel(logging.INFO)
-
-def get_debug_level(msgdomain = domain.Default):
- if not msgdomain:
- level = logger.getEffectiveLevel()
- else:
- level = loggers[msgdomain].getEffectiveLevel()
- return max(0, logging.DEBUG - level + 1)
-
-def set_verbose(level):
- if level:
- logger.setLevel(BBLogFormatter.VERBOSE)
- else:
- logger.setLevel(BBLogFormatter.INFO)
-
-def set_debug_domains(domainargs):
- for (domainarg, iterator) in groupby(domainargs):
- for index, msgdomain in enumerate(domain._fields):
- if msgdomain == domainarg:
- level = len(tuple(iterator))
- if level:
- loggers[index].setLevel(logging.DEBUG - level + 1)
- break
- else:
- warn(None, "Logging domain %s is not valid, ignoring" % domainarg)
-
-#
-# Message handling functions
-#
-
-def debug(level, msgdomain, msg):
- warnings.warn("bb.msg.debug will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- level = logging.DEBUG - (level - 1)
- if not msgdomain:
- logger.debug(level, msg)
- else:
- loggers[msgdomain].debug(level, msg)
-
-def plain(msg):
- warnings.warn("bb.msg.plain will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- logger.plain(msg)
-
-def note(level, msgdomain, msg):
- warnings.warn("bb.msg.note will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- if level > 1:
- if msgdomain:
- logger.verbose(msg)
- else:
- loggers[msgdomain].verbose(msg)
- else:
- if msgdomain:
- logger.info(msg)
- else:
- loggers[msgdomain].info(msg)
-
-def warn(msgdomain, msg):
- warnings.warn("bb.msg.warn will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- if not msgdomain:
- logger.warn(msg)
- else:
- loggers[msgdomain].warn(msg)
-
-def error(msgdomain, msg):
- warnings.warn("bb.msg.error will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- if not msgdomain:
- logger.error(msg)
- else:
- loggers[msgdomain].error(msg)
-
-def fatal(msgdomain, msg):
- warnings.warn("bb.msg.fatal will soon be deprecated in favor of raising appropriate exceptions",
- PendingDeprecationWarning, stacklevel=2)
- if not msgdomain:
- logger.critical(msg)
- else:
- loggers[msgdomain].critical(msg)
- sys.exit(1)
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
deleted file mode 100644
index eee8d9cddb..0000000000
--- a/bitbake/lib/bb/parse/__init__.py
+++ /dev/null
@@ -1,123 +0,0 @@
-"""
-BitBake Parsers
-
-File parsers for the BitBake build tools.
-
-"""
-
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-handlers = []
-
-import os
-import stat
-import logging
-import bb
-import bb.utils
-import bb.siggen
-
-logger = logging.getLogger("BitBake.Parsing")
-
-class ParseError(Exception):
- """Exception raised when parsing fails"""
-
-class SkipPackage(Exception):
- """Exception raised to skip this package"""
-
-__mtime_cache = {}
-def cached_mtime(f):
- if f not in __mtime_cache:
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- return __mtime_cache[f]
-
-def cached_mtime_noerror(f):
- if f not in __mtime_cache:
- try:
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- except OSError:
- return 0
- return __mtime_cache[f]
-
-def update_mtime(f):
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- return __mtime_cache[f]
-
-def mark_dependency(d, f):
- if f.startswith('./'):
- f = "%s/%s" % (os.getcwd(), f[2:])
- deps = bb.data.getVar('__depends', d) or set()
- deps.update([(f, cached_mtime(f))])
- bb.data.setVar('__depends', deps, d)
-
-def supports(fn, data):
- """Returns true if we have a handler for this file, false otherwise"""
- for h in handlers:
- if h['supports'](fn, data):
- return 1
- return 0
-
-def handle(fn, data, include = 0):
- """Call the handler that is appropriate for this file"""
- for h in handlers:
- if h['supports'](fn, data):
- return h['handle'](fn, data, include)
- raise ParseError("%s is not a BitBake file" % fn)
-
-def init(fn, data):
- for h in handlers:
- if h['supports'](fn):
- return h['init'](data)
-
-def init_parser(d):
- bb.parse.siggen = bb.siggen.init(d)
-
-def resolve_file(fn, d):
- if not os.path.isabs(fn):
- bbpath = bb.data.getVar("BBPATH", d, True)
- newfn = bb.utils.which(bbpath, fn)
- if not newfn:
- raise IOError("file %s not found in %s" % (fn, bbpath))
- fn = newfn
-
- logger.debug(2, "LOAD %s", fn)
- return fn
-
-# Used by OpenEmbedded metadata
-__pkgsplit_cache__={}
-def vars_from_file(mypkg, d):
- if not mypkg:
- return (None, None, None)
- if mypkg in __pkgsplit_cache__:
- return __pkgsplit_cache__[mypkg]
-
- myfile = os.path.splitext(os.path.basename(mypkg))
- parts = myfile[0].split('_')
- __pkgsplit_cache__[mypkg] = parts
- if len(parts) > 3:
- raise ParseError("Unable to generate default variables from the filename: %s (too many underscores)" % mypkg)
- exp = 3 - len(parts)
- tmplist = []
- while exp != 0:
- exp -= 1
- tmplist.append(None)
- parts.extend(tmplist)
- return parts
-
-from bb.parse.parse_py import __version__, ConfHandler, BBHandler
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
deleted file mode 100644
index b968db40b3..0000000000
--- a/bitbake/lib/bb/parse/ast.py
+++ /dev/null
@@ -1,446 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- AbstractSyntaxTree classes for the Bitbake language
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2009 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import absolute_import
-from future_builtins import filter
-import re
-import string
-import logging
-import bb
-import itertools
-from bb import methodpool
-from bb.parse import logger
-
-__parsed_methods__ = bb.methodpool.get_parsed_dict()
-_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")
-
-class StatementGroup(list):
- def eval(self, data):
- for statement in self:
- statement.eval(data)
-
-class AstNode(object):
- def __init__(self, filename, lineno):
- self.filename = filename
- self.lineno = lineno
-
-class IncludeNode(AstNode):
- def __init__(self, filename, lineno, what_file, force):
- AstNode.__init__(self, filename, lineno)
- self.what_file = what_file
- self.force = force
-
- def eval(self, data):
- """
- Include the file and evaluate the statements
- """
- s = bb.data.expand(self.what_file, data)
- logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
-
- # TODO: Cache those includes... maybe not here though
- if self.force:
- bb.parse.ConfHandler.include(self.filename, s, data, "include required")
- else:
- bb.parse.ConfHandler.include(self.filename, s, data, False)
-
-class ExportNode(AstNode):
- def __init__(self, filename, lineno, var):
- AstNode.__init__(self, filename, lineno)
- self.var = var
-
- def eval(self, data):
- bb.data.setVarFlag(self.var, "export", 1, data)
-
-class DataNode(AstNode):
- """
- Various data related updates. For the sake of sanity
- we have one class doing all this. This means that all
- this need to be re-evaluated... we might be able to do
- that faster with multiple classes.
- """
- def __init__(self, filename, lineno, groupd):
- AstNode.__init__(self, filename, lineno)
- self.groupd = groupd
-
- def getFunc(self, key, data):
- if 'flag' in self.groupd and self.groupd['flag'] != None:
- return bb.data.getVarFlag(key, self.groupd['flag'], data)
- else:
- return bb.data.getVar(key, data)
-
- def eval(self, data):
- groupd = self.groupd
- key = groupd["var"]
- if "exp" in groupd and groupd["exp"] != None:
- bb.data.setVarFlag(key, "export", 1, data)
- if "ques" in groupd and groupd["ques"] != None:
- val = self.getFunc(key, data)
- if val == None:
- val = groupd["value"]
- elif "colon" in groupd and groupd["colon"] != None:
- e = data.createCopy()
- bb.data.update_data(e)
- val = bb.data.expand(groupd["value"], e)
- elif "append" in groupd and groupd["append"] != None:
- val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
- elif "prepend" in groupd and groupd["prepend"] != None:
- val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
- elif "postdot" in groupd and groupd["postdot"] != None:
- val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
- elif "predot" in groupd and groupd["predot"] != None:
- val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
- else:
- val = groupd["value"]
-
- if 'flag' in groupd and groupd['flag'] != None:
- bb.data.setVarFlag(key, groupd['flag'], val, data)
- elif groupd["lazyques"]:
- bb.data.setVarFlag(key, "defaultval", val, data)
- else:
- bb.data.setVar(key, val, data)
-
-class MethodNode(AstNode):
- def __init__(self, filename, lineno, func_name, body):
- AstNode.__init__(self, filename, lineno)
- self.func_name = func_name
- self.body = body
-
- def eval(self, data):
- if self.func_name == "__anonymous":
- funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(string.maketrans('/.+-', '____'))))
- if not funcname in bb.methodpool._parsed_fns:
- text = "def %s(d):\n" % (funcname) + '\n'.join(self.body)
- bb.methodpool.insert_method(funcname, text, self.filename)
- anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or []
- anonfuncs.append(funcname)
- bb.data.setVar('__BBANONFUNCS', anonfuncs, data)
- else:
- bb.data.setVarFlag(self.func_name, "func", 1, data)
- bb.data.setVar(self.func_name, '\n'.join(self.body), data)
-
-class PythonMethodNode(AstNode):
- def __init__(self, filename, lineno, function, define, body):
- AstNode.__init__(self, filename, lineno)
- self.function = function
- self.define = define
- self.body = body
-
- def eval(self, data):
- # Note we will add root to parsedmethods after having parse
- # 'this' file. This means we will not parse methods from
- # bb classes twice
- text = '\n'.join(self.body)
- if not bb.methodpool.parsed_module(self.define):
- bb.methodpool.insert_method(self.define, text, self.filename)
- bb.data.setVarFlag(self.function, "func", 1, data)
- bb.data.setVarFlag(self.function, "python", 1, data)
- bb.data.setVar(self.function, text, data)
-
-class MethodFlagsNode(AstNode):
- def __init__(self, filename, lineno, key, m):
- AstNode.__init__(self, filename, lineno)
- self.key = key
- self.m = m
-
- def eval(self, data):
- if bb.data.getVar(self.key, data):
- # clean up old version of this piece of metadata, as its
- # flags could cause problems
- bb.data.setVarFlag(self.key, 'python', None, data)
- bb.data.setVarFlag(self.key, 'fakeroot', None, data)
- if self.m.group("py") is not None:
- bb.data.setVarFlag(self.key, "python", "1", data)
- else:
- bb.data.delVarFlag(self.key, "python", data)
- if self.m.group("fr") is not None:
- bb.data.setVarFlag(self.key, "fakeroot", "1", data)
- else:
- bb.data.delVarFlag(self.key, "fakeroot", data)
-
-class ExportFuncsNode(AstNode):
- def __init__(self, filename, lineno, fns, classes):
- AstNode.__init__(self, filename, lineno)
- self.n = fns.split()
- self.classes = classes
-
- def eval(self, data):
- for f in self.n:
- allvars = []
- allvars.append(f)
- allvars.append(self.classes[-1] + "_" + f)
-
- vars = [[ allvars[0], allvars[1] ]]
- if len(self.classes) > 1 and self.classes[-2] is not None:
- allvars.append(self.classes[-2] + "_" + f)
- vars = []
- vars.append([allvars[2], allvars[1]])
- vars.append([allvars[0], allvars[2]])
-
- for (var, calledvar) in vars:
- if bb.data.getVar(var, data) and not bb.data.getVarFlag(var, 'export_func', data):
- continue
-
- if bb.data.getVar(var, data):
- bb.data.setVarFlag(var, 'python', None, data)
- bb.data.setVarFlag(var, 'func', None, data)
-
- for flag in [ "func", "python" ]:
- if bb.data.getVarFlag(calledvar, flag, data):
- bb.data.setVarFlag(var, flag, bb.data.getVarFlag(calledvar, flag, data), data)
- for flag in [ "dirs" ]:
- if bb.data.getVarFlag(var, flag, data):
- bb.data.setVarFlag(calledvar, flag, bb.data.getVarFlag(var, flag, data), data)
-
- if bb.data.getVarFlag(calledvar, "python", data):
- bb.data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", data)
- else:
- bb.data.setVar(var, "\t" + calledvar + "\n", data)
- bb.data.setVarFlag(var, 'export_func', '1', data)
-
-class AddTaskNode(AstNode):
- def __init__(self, filename, lineno, func, before, after):
- AstNode.__init__(self, filename, lineno)
- self.func = func
- self.before = before
- self.after = after
-
- def eval(self, data):
- var = self.func
- if self.func[:3] != "do_":
- var = "do_" + self.func
-
- bb.data.setVarFlag(var, "task", 1, data)
- bbtasks = bb.data.getVar('__BBTASKS', data) or []
- if not var in bbtasks:
- bbtasks.append(var)
- bb.data.setVar('__BBTASKS', bbtasks, data)
-
- existing = bb.data.getVarFlag(var, "deps", data) or []
- if self.after is not None:
- # set up deps for function
- for entry in self.after.split():
- if entry not in existing:
- existing.append(entry)
- bb.data.setVarFlag(var, "deps", existing, data)
- if self.before is not None:
- # set up things that depend on this func
- for entry in self.before.split():
- existing = bb.data.getVarFlag(entry, "deps", data) or []
- if var not in existing:
- bb.data.setVarFlag(entry, "deps", [var] + existing, data)
-
-class BBHandlerNode(AstNode):
- def __init__(self, filename, lineno, fns):
- AstNode.__init__(self, filename, lineno)
- self.hs = fns.split()
-
- def eval(self, data):
- bbhands = bb.data.getVar('__BBHANDLERS', data) or []
- for h in self.hs:
- bbhands.append(h)
- bb.data.setVarFlag(h, "handler", 1, data)
- bb.data.setVar('__BBHANDLERS', bbhands, data)
-
-class InheritNode(AstNode):
- def __init__(self, filename, lineno, classes):
- AstNode.__init__(self, filename, lineno)
- self.classes = classes
-
- def eval(self, data):
- bb.parse.BBHandler.inherit(self.classes, data)
-
-def handleInclude(statements, filename, lineno, m, force):
- statements.append(IncludeNode(filename, lineno, m.group(1), force))
-
-def handleExport(statements, filename, lineno, m):
- statements.append(ExportNode(filename, lineno, m.group(1)))
-
-def handleData(statements, filename, lineno, groupd):
- statements.append(DataNode(filename, lineno, groupd))
-
-def handleMethod(statements, filename, lineno, func_name, body):
- statements.append(MethodNode(filename, lineno, func_name, body))
-
-def handlePythonMethod(statements, filename, lineno, funcname, root, body):
- statements.append(PythonMethodNode(filename, lineno, funcname, root, body))
-
-def handleMethodFlags(statements, filename, lineno, key, m):
- statements.append(MethodFlagsNode(filename, lineno, key, m))
-
-def handleExportFuncs(statements, filename, lineno, m, classes):
- statements.append(ExportFuncsNode(filename, lineno, m.group(1), classes))
-
-def handleAddTask(statements, filename, lineno, m):
- func = m.group("func")
- before = m.group("before")
- after = m.group("after")
- if func is None:
- return
-
- statements.append(AddTaskNode(filename, lineno, func, before, after))
-
-def handleBBHandlers(statements, filename, lineno, m):
- statements.append(BBHandlerNode(filename, lineno, m.group(1)))
-
-def handleInherit(statements, filename, lineno, m):
- classes = m.group(1)
- statements.append(InheritNode(filename, lineno, classes.split()))
-
-def finalize(fn, d, variant = None):
- bb.data.expandKeys(d)
- bb.data.update_data(d)
- code = []
- for funcname in bb.data.getVar("__BBANONFUNCS", d) or []:
- code.append("%s(d)" % funcname)
- bb.utils.simple_exec("\n".join(code), {"d": d})
- bb.data.update_data(d)
-
- all_handlers = {}
- for var in bb.data.getVar('__BBHANDLERS', d) or []:
- # try to add the handler
- handler = bb.data.getVar(var, d)
- bb.event.register(var, handler)
-
- tasklist = bb.data.getVar('__BBTASKS', d) or []
- bb.build.add_tasks(tasklist, d)
-
- bb.parse.siggen.finalise(fn, d, variant)
-
- bb.event.fire(bb.event.RecipeParsed(fn), d)
-
-def _create_variants(datastores, names, function):
- def create_variant(name, orig_d, arg = None):
- new_d = bb.data.createCopy(orig_d)
- function(arg or name, new_d)
- datastores[name] = new_d
-
- for variant, variant_d in datastores.items():
- for name in names:
- if not variant:
- # Based on main recipe
- create_variant(name, variant_d)
- else:
- create_variant("%s-%s" % (variant, name), variant_d, name)
-
-def _expand_versions(versions):
- def expand_one(version, start, end):
- for i in xrange(start, end + 1):
- ver = _bbversions_re.sub(str(i), version, 1)
- yield ver
-
- versions = iter(versions)
- while True:
- try:
- version = next(versions)
- except StopIteration:
- break
-
- range_ver = _bbversions_re.search(version)
- if not range_ver:
- yield version
- else:
- newversions = expand_one(version, int(range_ver.group("from")),
- int(range_ver.group("to")))
- versions = itertools.chain(newversions, versions)
-
-def multi_finalize(fn, d):
- appends = (d.getVar("__BBAPPEND", True) or "").split()
- for append in appends:
- logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
- bb.parse.BBHandler.handle(append, d, True)
-
- safe_d = d
- d = bb.data.createCopy(safe_d)
- try:
- finalize(fn, d)
- except bb.parse.SkipPackage:
- bb.data.setVar("__SKIPPED", True, d)
- datastores = {"": safe_d}
-
- versions = (d.getVar("BBVERSIONS", True) or "").split()
- if versions:
- pv = orig_pv = d.getVar("PV", True)
- baseversions = {}
-
- def verfunc(ver, d, pv_d = None):
- if pv_d is None:
- pv_d = d
-
- overrides = d.getVar("OVERRIDES", True).split(":")
- pv_d.setVar("PV", ver)
- overrides.append(ver)
- bpv = baseversions.get(ver) or orig_pv
- pv_d.setVar("BPV", bpv)
- overrides.append(bpv)
- d.setVar("OVERRIDES", ":".join(overrides))
-
- versions = list(_expand_versions(versions))
- for pos, version in enumerate(list(versions)):
- try:
- pv, bpv = version.split(":", 2)
- except ValueError:
- pass
- else:
- versions[pos] = pv
- baseversions[pv] = bpv
-
- if pv in versions and not baseversions.get(pv):
- versions.remove(pv)
- else:
- pv = versions.pop()
-
- # This is necessary because our existing main datastore
- # has already been finalized with the old PV, we need one
- # that's been finalized with the new PV.
- d = bb.data.createCopy(safe_d)
- verfunc(pv, d, safe_d)
- try:
- finalize(fn, d)
- except bb.parse.SkipPackage:
- bb.data.setVar("__SKIPPED", True, d)
-
- _create_variants(datastores, versions, verfunc)
-
- extended = d.getVar("BBCLASSEXTEND", True) or ""
- if extended:
- pn = d.getVar("PN", True)
- def extendfunc(name, d):
- d.setVar("PN", "%s-%s" % (pn, name))
- bb.parse.BBHandler.inherit([name], d)
-
- safe_d.setVar("BBCLASSEXTEND", extended)
- _create_variants(datastores, extended.split(), extendfunc)
-
- for variant, variant_d in datastores.iteritems():
- if variant:
- try:
- finalize(fn, variant_d, variant)
- except bb.parse.SkipPackage:
- bb.data.setVar("__SKIPPED", True, variant_d)
-
- if len(datastores) > 1:
- variants = filter(None, datastores.iterkeys())
- safe_d.setVar("__VARIANTS", " ".join(variants))
-
- datastores[""] = d
- return datastores
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
deleted file mode 100644
index 402cd07e2a..0000000000
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- class for handling .bb files
-
- Reads a .bb file and obtains its metadata
-
-"""
-
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import absolute_import
-import re, bb, os
-import logging
-import bb.build, bb.utils
-from bb import data
-
-from . import ConfHandler
-from .. import resolve_file, ast, logger
-from .ConfHandler import include, init
-
-# For compatibility
-bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-
-__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
-__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
-__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
-__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
-__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
-__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
-__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
-
-
-__infunc__ = ""
-__inpython__ = False
-__body__ = []
-__classname__ = ""
-classes = [ None, ]
-
-cached_statements = {}
-
-# We need to indicate EOF to the feeder. This code is so messy that
-# factoring it out to a close_parse_file method is out of question.
-# We will use the IN_PYTHON_EOF as an indicator to just close the method
-#
-# The two parts using it are tightly integrated anyway
-IN_PYTHON_EOF = -9999999999999
-
-
-
-def supports(fn, d):
- """Return True if fn has a supported extension"""
- return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
-
-def inherit(files, d):
- __inherit_cache = data.getVar('__inherit_cache', d) or []
- fn = ""
- lineno = 0
- for file in files:
- file = data.expand(file, d)
- if not os.path.isabs(file) and not file.endswith(".bbclass"):
- file = os.path.join('classes', '%s.bbclass' % file)
-
- if not file in __inherit_cache:
- logger.log(logging.DEBUG -1, "BB %s:%d: inheriting %s", fn, lineno, file)
- __inherit_cache.append( file )
- data.setVar('__inherit_cache', __inherit_cache, d)
- include(fn, file, d, "inherit")
- __inherit_cache = data.getVar('__inherit_cache', d) or []
-
-def get_statements(filename, absolute_filename, base_name):
- global cached_statements
-
- try:
- return cached_statements[absolute_filename]
- except KeyError:
- file = open(absolute_filename, 'r')
- statements = ast.StatementGroup()
-
- lineno = 0
- while True:
- lineno = lineno + 1
- s = file.readline()
- if not s: break
- s = s.rstrip()
- feeder(lineno, s, filename, base_name, statements)
- if __inpython__:
- # add a blank line to close out any python definition
- feeder(IN_PYTHON_EOF, "", filename, base_name, statements)
-
- if filename.endswith(".bbclass") or filename.endswith(".inc"):
- cached_statements[absolute_filename] = statements
- return statements
-
-def handle(fn, d, include):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
- __body__ = []
- __infunc__ = ""
- __classname__ = ""
- __residue__ = []
-
-
- if include == 0:
- logger.debug(2, "BB %s: handle(data)", fn)
- else:
- logger.debug(2, "BB %s: handle(data, include)", fn)
-
- base_name = os.path.basename(fn)
- (root, ext) = os.path.splitext(base_name)
- init(d)
-
- if ext == ".bbclass":
- __classname__ = root
- classes.append(__classname__)
- __inherit_cache = data.getVar('__inherit_cache', d) or []
- if not fn in __inherit_cache:
- __inherit_cache.append(fn)
- data.setVar('__inherit_cache', __inherit_cache, d)
-
- if include != 0:
- oldfile = data.getVar('FILE', d)
- else:
- oldfile = None
-
- abs_fn = resolve_file(fn, d)
-
- if include:
- bb.parse.mark_dependency(d, abs_fn)
-
- # actual loading
- statements = get_statements(fn, abs_fn, base_name)
-
- # DONE WITH PARSING... time to evaluate
- if ext != ".bbclass":
- data.setVar('FILE', fn, d)
-
- statements.eval(d)
-
- if ext == ".bbclass":
- classes.remove(__classname__)
- else:
- if include == 0:
- return ast.multi_finalize(fn, d)
-
- if oldfile:
- bb.data.setVar("FILE", oldfile, d)
-
- # we have parsed the bb class now
- if ext == ".bbclass" or ext == ".inc":
- bb.methodpool.get_parsed_dict()[base_name] = 1
-
- return d
-
-def feeder(lineno, s, fn, root, statements):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, classes, bb, __residue__
- if __infunc__:
- if s == '}':
- __body__.append('')
- ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
- __infunc__ = ""
- __body__ = []
- else:
- __body__.append(s)
- return
-
- if __inpython__:
- m = __python_func_regexp__.match(s)
- if m and lineno != IN_PYTHON_EOF:
- __body__.append(s)
- return
- else:
- ast.handlePythonMethod(statements, fn, lineno, __inpython__,
- root, __body__)
- __body__ = []
- __inpython__ = False
-
- if lineno == IN_PYTHON_EOF:
- return
-
-
- # Skip empty lines
- if s == '':
- return
-
- if s[0] == '#':
- if len(__residue__) != 0 and __residue__[0][0] != "#":
- bb.error("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
-
- if s[-1] == '\\':
- __residue__.append(s[:-1])
- return
-
- s = "".join(__residue__) + s
- __residue__ = []
-
- # Skip comments
- if s[0] == '#':
- return
-
- m = __func_start_regexp__.match(s)
- if m:
- __infunc__ = m.group("func") or "__anonymous"
- ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
- return
-
- m = __def_regexp__.match(s)
- if m:
- __body__.append(s)
- __inpython__ = m.group(1)
-
- return
-
- m = __export_func_regexp__.match(s)
- if m:
- ast.handleExportFuncs(statements, fn, lineno, m, classes)
- return
-
- m = __addtask_regexp__.match(s)
- if m:
- ast.handleAddTask(statements, fn, lineno, m)
- return
-
- m = __addhandler_regexp__.match(s)
- if m:
- ast.handleBBHandlers(statements, fn, lineno, m)
- return
-
- m = __inherit_regexp__.match(s)
- if m:
- ast.handleInherit(statements, fn, lineno, m)
- return
-
- return ConfHandler.feeder(lineno, s, fn, statements)
-
-# Add us to the handlers list
-from .. import handlers
-handlers.append({'supports': supports, 'handle': handle, 'init': init})
-del handlers
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
deleted file mode 100644
index fc239a3540..0000000000
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- class for handling configuration data files
-
- Reads a .conf file and obtains its metadata
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, bb.data, os
-import logging
-import bb.utils
-from bb.parse import ParseError, resolve_file, ast, logger
-
-#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
-__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
-__include_regexp__ = re.compile( r"include\s+(.+)" )
-__require_regexp__ = re.compile( r"require\s+(.+)" )
-__export_regexp__ = re.compile( r"export\s+(.+)" )
-
-def init(data):
- topdir = bb.data.getVar('TOPDIR', data)
- if not topdir:
- bb.data.setVar('TOPDIR', os.getcwd(), data)
-
-
-def supports(fn, d):
- return fn[-5:] == ".conf"
-
-def include(oldfn, fn, data, error_out):
- """
- error_out If True a ParseError will be raised if the to be included
- config-files could not be included.
- """
- if oldfn == fn: # prevent infinite recursion
- return None
-
- import bb
- fn = bb.data.expand(fn, data)
- oldfn = bb.data.expand(oldfn, data)
-
- if not os.path.isabs(fn):
- dname = os.path.dirname(oldfn)
- bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1))
- abs_fn = bb.utils.which(bbpath, fn)
- if abs_fn:
- fn = abs_fn
-
- from bb.parse import handle
- try:
- ret = handle(fn, data, True)
- except IOError:
- if error_out:
- raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
- logger.debug(2, "CONF file '%s' not found", fn)
-
-def handle(fn, data, include):
- init(data)
-
- if include == 0:
- oldfile = None
- else:
- oldfile = bb.data.getVar('FILE', data)
-
- abs_fn = resolve_file(fn, data)
- f = open(abs_fn, 'r')
-
- if include:
- bb.parse.mark_dependency(data, abs_fn)
-
- statements = ast.StatementGroup()
- lineno = 0
- while True:
- lineno = lineno + 1
- s = f.readline()
- if not s: break
- w = s.strip()
- if not w: continue # skip empty lines
- s = s.rstrip()
- if s[0] == '#': continue # skip comments
- while s[-1] == '\\':
- s2 = f.readline()[:-1].strip()
- lineno = lineno + 1
- s = s[:-1] + s2
- feeder(lineno, s, fn, statements)
-
- # DONE WITH PARSING... time to evaluate
- bb.data.setVar('FILE', fn, data)
- statements.eval(data)
- if oldfile:
- bb.data.setVar('FILE', oldfile, data)
-
- return data
-
-def feeder(lineno, s, fn, statements):
- m = __config_regexp__.match(s)
- if m:
- groupd = m.groupdict()
- ast.handleData(statements, fn, lineno, groupd)
- return
-
- m = __include_regexp__.match(s)
- if m:
- ast.handleInclude(statements, fn, lineno, m, False)
- return
-
- m = __require_regexp__.match(s)
- if m:
- ast.handleInclude(statements, fn, lineno, m, True)
- return
-
- m = __export_regexp__.match(s)
- if m:
- ast.handleExport(statements, fn, lineno, m)
- return
-
- raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s));
-
-# Add us to the handlers list
-from bb.parse import handlers
-handlers.append({'supports': supports, 'handle': handle, 'init': init})
-del handlers
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py
deleted file mode 100644
index 3e658d0de9..0000000000
--- a/bitbake/lib/bb/parse/parse_py/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Parsers
-
-File parsers for the BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from __future__ import absolute_import
-from . import ConfHandler
-from . import BBHandler
-
-__version__ = '1.0'
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
deleted file mode 100644
index da05752311..0000000000
--- a/bitbake/lib/bb/persist_data.py
+++ /dev/null
@@ -1,194 +0,0 @@
-"""BitBake Persistent Data Store
-
-Used to store data in a central location such that other threads/tasks can
-access them at some future date. Acts as a convenience wrapper around sqlite,
-currently, providing a key/value store accessed by 'domain'.
-"""
-
-# Copyright (C) 2007 Richard Purdie
-# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import collections
-import logging
-import os.path
-import sys
-import warnings
-import bb.msg, bb.data, bb.utils
-
-try:
- import sqlite3
-except ImportError:
- from pysqlite2 import dbapi2 as sqlite3
-
-sqlversion = sqlite3.sqlite_version_info
-if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
- raise Exception("sqlite3 version 3.3.0 or later is required.")
-
-
-logger = logging.getLogger("BitBake.PersistData")
-
-
-class SQLTable(collections.MutableMapping):
- """Object representing a table/domain in the database"""
- def __init__(self, cursor, table):
- self.cursor = cursor
- self.table = table
-
- self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
- % table)
-
- def _execute(self, *query):
- """Execute a query, waiting to acquire a lock if necessary"""
- count = 0
- while True:
- try:
- return self.cursor.execute(*query)
- except sqlite3.OperationalError as exc:
- if 'database is locked' in str(exc) and count < 500:
- count = count + 1
- continue
- raise
-
- def __getitem__(self, key):
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- for row in data:
- return row[1]
-
- def __delitem__(self, key):
- self._execute("DELETE from %s where key=?;" % self.table, [key])
-
- def __setitem__(self, key, value):
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- exists = len(list(data))
- if exists:
- self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
- [value, key])
- else:
- self._execute("INSERT into %s(key, value) values (?, ?);" %
- self.table, [key, value])
-
- def __contains__(self, key):
- return key in set(self)
-
- def __len__(self):
- data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
- for row in data:
- return row[0]
-
- def __iter__(self):
- data = self._execute("SELECT key FROM %s;" % self.table)
- for row in data:
- yield row[0]
-
- def iteritems(self):
- data = self._execute("SELECT * FROM %s;" % self.table)
- for row in data:
- yield row[0], row[1]
-
- def itervalues(self):
- data = self._execute("SELECT value FROM %s;" % self.table)
- for row in data:
- yield row[0]
-
-
-class SQLData(object):
- """Object representing the persistent data"""
- def __init__(self, filename):
- bb.utils.mkdirhier(os.path.dirname(filename))
-
- self.filename = filename
- self.connection = sqlite3.connect(filename, timeout=5,
- isolation_level=None)
- self.cursor = self.connection.cursor()
- self._tables = {}
-
- def __getitem__(self, table):
- if not isinstance(table, basestring):
- raise TypeError("table argument must be a string, not '%s'" %
- type(table))
-
- if table in self._tables:
- return self._tables[table]
- else:
- tableobj = self._tables[table] = SQLTable(self.cursor, table)
- return tableobj
-
- def __delitem__(self, table):
- if table in self._tables:
- del self._tables[table]
- self.cursor.execute("DROP TABLE IF EXISTS %s;" % table)
-
-
-class PersistData(object):
- """Deprecated representation of the bitbake persistent data store"""
- def __init__(self, d):
- warnings.warn("Use of PersistData will be deprecated in the future",
- category=PendingDeprecationWarning,
- stacklevel=2)
-
- self.data = persist(d)
- logger.debug(1, "Using '%s' as the persistent data cache",
- self.data.filename)
-
- def addDomain(self, domain):
- """
- Add a domain (pending deprecation)
- """
- return self.data[domain]
-
- def delDomain(self, domain):
- """
- Removes a domain and all the data it contains
- """
- del self.data[domain]
-
- def getKeyValues(self, domain):
- """
- Return a list of key + value pairs for a domain
- """
- return self.data[domain].items()
-
- def getValue(self, domain, key):
- """
- Return the value of a key for a domain
- """
- return self.data[domain][key]
-
- def setValue(self, domain, key, value):
- """
- Sets the value of a key for a domain
- """
- self.data[domain][key] = value
-
- def delValue(self, domain, key):
- """
- Deletes a key/value pair
- """
- del self.data[domain][key]
-
-
-def persist(d):
- """Convenience factory for construction of SQLData based upon metadata"""
- cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
- bb.data.getVar("CACHE", d, True))
- if not cachedir:
- logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
- sys.exit(1)
-
- cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
- return SQLData(cachefile)
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
deleted file mode 100644
index 4150d80e06..0000000000
--- a/bitbake/lib/bb/process.py
+++ /dev/null
@@ -1,109 +0,0 @@
-import logging
-import signal
-import subprocess
-
-logger = logging.getLogger('BitBake.Process')
-
-def subprocess_setup():
- # Python installs a SIGPIPE handler by default. This is usually not what
- # non-Python subprocesses expect.
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-
-class CmdError(RuntimeError):
- def __init__(self, command, msg=None):
- self.command = command
- self.msg = msg
-
- def __str__(self):
- if not isinstance(self.command, basestring):
- cmd = subprocess.list2cmdline(self.command)
- else:
- cmd = self.command
-
- msg = "Execution of '%s' failed" % cmd
- if self.msg:
- msg += ': %s' % self.msg
- return msg
-
-class NotFoundError(CmdError):
- def __str__(self):
- return CmdError.__str__(self) + ": command not found"
-
-class ExecutionError(CmdError):
- def __init__(self, command, exitcode, stdout = None, stderr = None):
- CmdError.__init__(self, command)
- self.exitcode = exitcode
- self.stdout = stdout
- self.stderr = stderr
-
- def __str__(self):
- message = ""
- if self.stderr:
- message += self.stderr
- if self.stdout:
- message += self.stdout
- if message:
- message = ":\n" + message
- return (CmdError.__str__(self) +
- " with exit code %s" % self.exitcode + message)
-
-class Popen(subprocess.Popen):
- defaults = {
- "close_fds": True,
- "preexec_fn": subprocess_setup,
- "stdout": subprocess.PIPE,
- "stderr": subprocess.STDOUT,
- "stdin": subprocess.PIPE,
- "shell": False,
- }
-
- def __init__(self, *args, **kwargs):
- options = dict(self.defaults)
- options.update(kwargs)
- subprocess.Popen.__init__(self, *args, **options)
-
-def _logged_communicate(pipe, log, input):
- if pipe.stdin:
- if input is not None:
- pipe.stdin.write(input)
- pipe.stdin.close()
-
- bufsize = 512
- outdata, errdata = [], []
- while pipe.poll() is None:
- if pipe.stdout is not None:
- data = pipe.stdout.read(bufsize)
- if data is not None:
- outdata.append(data)
- log.write(data)
-
- if pipe.stderr is not None:
- data = pipe.stderr.read(bufsize)
- if data is not None:
- errdata.append(data)
- log.write(data)
- return ''.join(outdata), ''.join(errdata)
-
-def run(cmd, input=None, log=None, **options):
- """Convenience function to run a command and return its output, raising an
- exception when the command fails"""
-
- if isinstance(cmd, basestring) and not "shell" in options:
- options["shell"] = True
-
- try:
- pipe = Popen(cmd, **options)
- except OSError, exc:
- if exc.errno == 2:
- raise NotFoundError(cmd)
- else:
- raise CmdError(cmd, exc)
-
- if log:
- stdout, stderr = _logged_communicate(pipe, log, input)
- else:
- stdout, stderr = pipe.communicate(input)
-
- if pipe.returncode != 0:
- raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
- return stdout, stderr
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
deleted file mode 100644
index dcba9ae255..0000000000
--- a/bitbake/lib/bb/providers.py
+++ /dev/null
@@ -1,330 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re
-import logging
-from bb import data, utils
-import bb
-
-logger = logging.getLogger("BitBake.Provider")
-
-class NoProvider(Exception):
- """Exception raised when no provider of a build dependency can be found"""
-
-class NoRProvider(Exception):
- """Exception raised when no provider of a runtime dependency can be found"""
-
-
-def sortPriorities(pn, dataCache, pkg_pn = None):
- """
- Reorder pkg_pn by file priority and default preference
- """
-
- if not pkg_pn:
- pkg_pn = dataCache.pkg_pn
-
- files = pkg_pn[pn]
- priorities = {}
- for f in files:
- priority = dataCache.bbfile_priority[f]
- preference = dataCache.pkg_dp[f]
- if priority not in priorities:
- priorities[priority] = {}
- if preference not in priorities[priority]:
- priorities[priority][preference] = []
- priorities[priority][preference].append(f)
- tmp_pn = []
- for pri in sorted(priorities, lambda a, b: a - b):
- tmp_pref = []
- for pref in sorted(priorities[pri], lambda a, b: b - a):
- tmp_pref.extend(priorities[pri][pref])
- tmp_pn = [tmp_pref] + tmp_pn
-
- return tmp_pn
-
-def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
- """
- Check if the version pe,pv,pr is the preferred one.
- If there is preferred version defined and ends with '%', then pv has to start with that version after removing the '%'
- """
- if (pr == preferred_r or preferred_r == None):
- if (pe == preferred_e or preferred_e == None):
- if preferred_v == pv:
- return True
- if preferred_v != None and preferred_v.endswith('%') and pv.startswith(preferred_v[:len(preferred_v)-1]):
- return True
- return False
-
-def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
- """
- Find the first provider in pkg_pn with a PREFERRED_VERSION set.
- """
-
- preferred_file = None
- preferred_ver = None
-
- localdata = data.createCopy(cfgData)
- bb.data.setVar('OVERRIDES', "pn-%s:%s:%s" % (pn, pn, data.getVar('OVERRIDES', localdata)), localdata)
- bb.data.update_data(localdata)
-
- preferred_v = bb.data.getVar('PREFERRED_VERSION_%s' % pn, localdata, True)
- if preferred_v:
- m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
- if m:
- if m.group(1):
- preferred_e = int(m.group(1)[:-1])
- else:
- preferred_e = None
- preferred_v = m.group(2)
- if m.group(3):
- preferred_r = m.group(3)[1:]
- else:
- preferred_r = None
- else:
- preferred_e = None
- preferred_r = None
-
- for file_set in pkg_pn:
- for f in file_set:
- pe, pv, pr = dataCache.pkg_pepvpr[f]
- if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
- preferred_file = f
- preferred_ver = (pe, pv, pr)
- break
- if preferred_file:
- break;
- if preferred_r:
- pv_str = '%s-%s' % (preferred_v, preferred_r)
- else:
- pv_str = preferred_v
- if not (preferred_e is None):
- pv_str = '%s:%s' % (preferred_e, pv_str)
- itemstr = ""
- if item:
- itemstr = " (for item %s)" % item
- if preferred_file is None:
- logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
- else:
- logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
-
- return (preferred_ver, preferred_file)
-
-
-def findLatestProvider(pn, cfgData, dataCache, file_set):
- """
- Return the highest version of the providers in file_set.
- Take default preferences into account.
- """
- latest = None
- latest_p = 0
- latest_f = None
- for file_name in file_set:
- pe, pv, pr = dataCache.pkg_pepvpr[file_name]
- dp = dataCache.pkg_dp[file_name]
-
- if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p):
- latest = (pe, pv, pr)
- latest_f = file_name
- latest_p = dp
-
- return (latest, latest_f)
-
-
-def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
- """
- If there is a PREFERRED_VERSION, find the highest-priority bbfile
- providing that version. If not, find the latest version provided by
- an bbfile in the highest-priority set.
- """
-
- sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
- # Find the highest priority provider with a PREFERRED_VERSION set
- (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
- # Find the latest version of the highest priority provider
- (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
-
- if preferred_file is None:
- preferred_file = latest_f
- preferred_ver = latest
-
- return (latest, latest_f, preferred_ver, preferred_file)
-
-
-def _filterProviders(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables and previous build results
- """
- eligible = []
- preferred_versions = {}
- sortpkg_pn = {}
-
- # The order of providers depends on the order of the files on the disk
- # up to here. Sort pkg_pn to make dependency issues reproducible rather
- # than effectively random.
- providers.sort()
-
- # Collate providers by PN
- pkg_pn = {}
- for p in providers:
- pn = dataCache.pkg_fn[p]
- if pn not in pkg_pn:
- pkg_pn[pn] = []
- pkg_pn[pn].append(p)
-
- logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
-
- # First add PREFERRED_VERSIONS
- for pn in pkg_pn:
- sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
- preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
- if preferred_versions[pn][1]:
- eligible.append(preferred_versions[pn][1])
-
- # Now add latest versions
- for pn in sortpkg_pn:
- if pn in preferred_versions and preferred_versions[pn][1]:
- continue
- preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
- eligible.append(preferred_versions[pn][1])
-
- if len(eligible) == 0:
- logger.error("no eligible providers for %s", item)
- return 0
-
- # If pn == item, give it a slight default preference
- # This means PREFERRED_PROVIDER_foobar defaults to foobar if available
- for p in providers:
- pn = dataCache.pkg_fn[p]
- if pn != item:
- continue
- (newvers, fn) = preferred_versions[pn]
- if not fn in eligible:
- continue
- eligible.remove(fn)
- eligible = [fn] + eligible
-
- return eligible
-
-
-def filterProviders(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables and previous build results
- Takes a "normal" target item
- """
-
- eligible = _filterProviders(providers, item, cfgData, dataCache)
-
- prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1)
- if prefervar:
- dataCache.preferred[item] = prefervar
-
- foundUnique = False
- if item in dataCache.preferred:
- for p in eligible:
- pn = dataCache.pkg_fn[p]
- if dataCache.preferred[item] == pn:
- logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item)
- eligible.remove(p)
- eligible = [p] + eligible
- foundUnique = True
- break
-
- logger.debug(1, "sorted providers for %s are: %s", item, eligible)
-
- return eligible, foundUnique
-
-def filterProvidersRunTime(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables and previous build results
- Takes a "runtime" target item
- """
-
- eligible = _filterProviders(providers, item, cfgData, dataCache)
-
- # Should use dataCache.preferred here?
- preferred = []
- preferred_vars = []
- pns = {}
- for p in eligible:
- pns[dataCache.pkg_fn[p]] = p
- for p in eligible:
- pn = dataCache.pkg_fn[p]
- provides = dataCache.pn_provides[pn]
- for provide in provides:
- prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
- logger.verbose("checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
- if prefervar in pns and pns[prefervar] not in preferred:
- var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
- logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
- preferred_vars.append(var)
- pref = pns[prefervar]
- eligible.remove(pref)
- eligible = [pref] + eligible
- preferred.append(pref)
- break
-
- numberPreferred = len(preferred)
-
- if numberPreferred > 1:
- logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s", item, preferred, preferred_vars)
-
- logger.debug(1, "sorted providers for %s are: %s", item, eligible)
-
- return eligible, numberPreferred
-
-regexp_cache = {}
-
-def getRuntimeProviders(dataCache, rdepend):
- """
- Return any providers of runtime dependency
- """
- rproviders = []
-
- if rdepend in dataCache.rproviders:
- rproviders += dataCache.rproviders[rdepend]
-
- if rdepend in dataCache.packages:
- rproviders += dataCache.packages[rdepend]
-
- if rproviders:
- return rproviders
-
- # Only search dynamic packages if we can't find anything in other variables
- for pattern in dataCache.packages_dynamic:
- pattern = pattern.replace('+', "\+")
- if pattern in regexp_cache:
- regexp = regexp_cache[pattern]
- else:
- try:
- regexp = re.compile(pattern)
- except:
- logger.error("Error parsing regular expression '%s'", pattern)
- raise
- regexp_cache[pattern] = regexp
- if regexp.match(rdepend):
- rproviders += dataCache.packages_dynamic[pattern]
-
- return rproviders
diff --git a/bitbake/lib/bb/pysh/__init__.py b/bitbake/lib/bb/pysh/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/bitbake/lib/bb/pysh/__init__.py
+++ /dev/null
diff --git a/bitbake/lib/bb/pysh/builtin.py b/bitbake/lib/bb/pysh/builtin.py
deleted file mode 100644
index 25ad22eb74..0000000000
--- a/bitbake/lib/bb/pysh/builtin.py
+++ /dev/null
@@ -1,710 +0,0 @@
-# builtin.py - builtins and utilities definitions for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Builtin and internal utilities implementations.
-
-- Beware not to use python interpreter environment as if it were the shell
-environment. For instance, commands working directory must be explicitely handled
-through env['PWD'] instead of relying on python working directory.
-"""
-import errno
-import optparse
-import os
-import re
-import subprocess
-import sys
-import time
-
-def has_subprocess_bug():
- return getattr(subprocess, 'list2cmdline') and \
- ( subprocess.list2cmdline(['']) == '' or \
- subprocess.list2cmdline(['foo|bar']) == 'foo|bar')
-
-# Detect python bug 1634343: "subprocess swallows empty arguments under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470>
-# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470>
-if has_subprocess_bug():
- import subprocess_fix
- subprocess.list2cmdline = subprocess_fix.list2cmdline
-
-from sherrors import *
-
-class NonExitingParser(optparse.OptionParser):
- """OptionParser default behaviour upon error is to print the error message and
- exit. Raise a utility error instead.
- """
- def error(self, msg):
- raise UtilityError(msg)
-
-#-------------------------------------------------------------------------------
-# set special builtin
-#-------------------------------------------------------------------------------
-OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False,
- help='The shell shall disable pathname expansion.')
-OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False,
- help="""When this option is on, if a simple command fails for any of the \
- reasons listed in Consequences of Shell Errors or returns an exit status \
- value >0, and is not part of the compound list following a while, until, \
- or if keyword, and is not a part of an AND or OR list, and is not a \
- pipeline preceded by the ! reserved word, then the shell shall immediately \
- exit.""")
-OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False,
- help="""The shell shall write to standard error a trace for each command \
- after it expands the command and before it executes it. It is unspecified \
- whether the command that turns tracing off is traced.""")
-
-def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SET.parse_args(args)
- env = interp.get_env()
-
- if option.has_f:
- env.set_opt('-f')
- if option.has_e:
- env.set_opt('-e')
- if option.has_x:
- env.set_opt('-x')
- return 0
-
-#-------------------------------------------------------------------------------
-# shift special builtin
-#-------------------------------------------------------------------------------
-def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- params = interp.get_env().get_positional_args()
- if args:
- try:
- n = int(args[0])
- if n > len(params):
- raise ValueError()
- except ValueError:
- return 1
- else:
- n = 1
-
- params[:n] = []
- interp.get_env().set_positional_args(params)
- return 0
-
-#-------------------------------------------------------------------------------
-# export special builtin
-#-------------------------------------------------------------------------------
-OPT_EXPORT = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_EXPORT.parse_args(args)
- if option.has_p:
- raise NotImplementedError()
-
- for arg in args:
- try:
- name, value = arg.split('=', 1)
- except ValueError:
- name, value = arg, None
- interp.get_env().export(name, value)
-
- return 0
-
-#-------------------------------------------------------------------------------
-# return special builtin
-#-------------------------------------------------------------------------------
-def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- res = 0
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = 0
- if not 0<=res<=255:
- res = 0
-
- # BUG: should be last executed command exit code
- raise ReturnSignal(res)
-
-#-------------------------------------------------------------------------------
-# trap special builtin
-#-------------------------------------------------------------------------------
-def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- if len(args) < 2:
- stderr.write('trap: usage: trap [[arg] signal_spec ...]\n')
- return 2
-
- action = args[0]
- for sig in args[1:]:
- try:
- env.traps[sig] = action
- except Exception, e:
- stderr.write('trap: %s\n' % str(e))
- return 0
-
-#-------------------------------------------------------------------------------
-# unset special builtin
-#-------------------------------------------------------------------------------
-OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions")
-OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False)
-OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False)
-
-def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_UNSET.parse_args(args)
-
- status = 0
- env = interp.get_env()
- for arg in args:
- try:
- if option.has_f:
- env.remove_function(arg)
- else:
- del env[arg]
- except KeyError:
- pass
- except VarAssignmentError:
- status = 1
-
- return status
-
-#-------------------------------------------------------------------------------
-# wait special builtin
-#-------------------------------------------------------------------------------
-def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return interp.wait([int(arg) for arg in args])
-
-#-------------------------------------------------------------------------------
-# cat utility
-#-------------------------------------------------------------------------------
-def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if not args:
- args = ['-']
-
- status = 0
- for arg in args:
- if arg == '-':
- data = stdin.read()
- else:
- path = os.path.join(env['PWD'], arg)
- try:
- f = file(path, 'rb')
- try:
- data = f.read()
- finally:
- f.close()
- except IOError, e:
- if e.errno != errno.ENOENT:
- raise
- status = 1
- continue
- stdout.write(data)
- stdout.flush()
- return status
-
-#-------------------------------------------------------------------------------
-# cd utility
-#-------------------------------------------------------------------------------
-OPT_CD = NonExitingParser("cd - change the working directory")
-
-def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_CD.parse_args(args)
- env = interp.get_env()
-
- directory = None
- printdir = False
- if not args:
- home = env.get('HOME')
- if not home:
- # Behaviour is unspecified when HOME is unset, do nothing
- return 0
- else:
- directory = home
- elif len(args)==1:
- directory = args[0]
- if directory=='-':
- if 'OLDPWD' not in env:
- raise UtilityError("OLDPWD not set")
- printdir = True
- directory = env['OLDPWD']
- else:
- raise UtilityError("too many arguments")
-
- curpath = None
- # Absolute directories will be handled correctly by the os.path.join call.
- if not directory.startswith('.') and not directory.startswith('..'):
- cdpaths = env.get('CDPATH', '.').split(';')
- for cdpath in cdpaths:
- p = os.path.join(cdpath, directory)
- if os.path.isdir(p):
- curpath = p
- break
-
- if curpath is None:
- curpath = directory
- curpath = os.path.join(env['PWD'], curpath)
-
- env['OLDPWD'] = env['PWD']
- env['PWD'] = curpath
- if printdir:
- stdout.write('%s\n' % curpath)
- return 0
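-
-# Example: with CDPATH='tools;lib' (';'-separated here, as on win32),
-# "cd foo" tries tools/foo then lib/foo before falling back to $PWD/foo.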
-
-#-------------------------------------------------------------------------------
-# colon utility
-#-------------------------------------------------------------------------------
-def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# echo utility
-#-------------------------------------------------------------------------------
-def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Echo only takes arguments, no options. Use printf if you need fancy stuff.
- output = ' '.join(args) + '\n'
- stdout.write(output)
- stdout.flush()
- return 0
-
-#-------------------------------------------------------------------------------
-# egrep utility
-#-------------------------------------------------------------------------------
-# egrep is usually a shell script.
-# Unfortunately, pysh does not support shell scripts *with arguments* right now,
-# so egrep is emulated here by delegating to grep -E, assuming grep is available.
-def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-E'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# env utility
-#-------------------------------------------------------------------------------
-def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if args and args[0]=='-i':
- raise NotImplementedError('env: -i option is not implemented')
-
- i = 0
- for arg in args:
- if '=' not in arg:
- break
- # Update the current environment
- name, value = arg.split('=', 1)
- env[name] = value
- i += 1
-
- if args[i:]:
- # Find then execute the specified interpreter
- utility = env.find_in_path(args[i])
- if not utility:
- return 127
- args[i:i+1] = utility
- name = args[i]
- args = args[i+1:]
- try:
- return run_command(name, args, interp, env, stdin, stdout, stderr,
- debugflags)
- except UtilityError:
- stderr.write('env: failed to execute %s\n' % ' '.join([name]+args))
- return 126
- else:
- for pair in env.get_variables().iteritems():
- stdout.write('%s=%s\n' % pair)
- return 0
-
-#-------------------------------------------------------------------------------
-# exit utility
-#-------------------------------------------------------------------------------
-def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- res = None
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = None
- if not 0<=res<=255:
- res = None
-
- if res is None:
- # BUG: should be last executed command exit code
- res = 0
-
- raise ExitSignal(res)
-
-#-------------------------------------------------------------------------------
-# fgrep utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-F'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# gunzip utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# kill utility
-#-------------------------------------------------------------------------------
-def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- for arg in args:
- pid = int(arg)
- status = subprocess.call(['pskill', '/T', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- # pskill is asynchronous, hence the stupid polling loop
- while 1:
- p = subprocess.Popen(['pslist', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- output = p.communicate()[0]
- if ('process %d was not' % pid) in output:
- break
- time.sleep(1)
- return status
-
-#-------------------------------------------------------------------------------
-# mkdir utility
-#-------------------------------------------------------------------------------
-OPT_MKDIR = NonExitingParser("mkdir - make directories.")
-OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # TODO: implement umask
- # TODO: implement proper utility error report
- option, args = OPT_MKDIR.parse_args(args)
- for arg in args:
- path = os.path.join(env['PWD'], arg)
- if option.has_p:
- try:
- os.makedirs(path)
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
- else:
- os.mkdir(path)
- return 0
-
-#-------------------------------------------------------------------------------
-# netstat utility
-#-------------------------------------------------------------------------------
-def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- # Do you really expect me to implement netstat?
- # This empty form is enough for Mercurial tests since it's
- # supposed to generate nothing upon success. Faking this test
- # is not a big deal either.
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# pwd utility
-#-------------------------------------------------------------------------------
-OPT_PWD = NonExitingParser("pwd - return working directory name")
-OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True,
- help="""If the PWD environment variable contains an absolute pathname of \
- the current directory that does not contain the filenames dot or dot-dot, \
- pwd shall write this pathname to standard output. Otherwise, the -L option \
- shall behave as the -P option.""")
-OPT_PWD.add_option('-P', action='store_true', dest='has_P', default=False,
- help="""The absolute pathname written shall not contain filenames that, in \
- the context of the pathname, refer to files of type symbolic link.""")
-
-def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_PWD.parse_args(args)
- stdout.write('%s\n' % env['PWD'])
- return 0
-
-#-------------------------------------------------------------------------------
-# printf utility
-#-------------------------------------------------------------------------------
-RE_UNESCAPE = re.compile(r'(\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}|\\.)')
-
-def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- def replace(m):
- assert m.group()
- g = m.group()[1:]
- if g.startswith('x'):
- return chr(int(g[1:], 16))
- if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g):
- # Yay, an octal number
- return chr(int(g, 8))
- return {
- 'a': '\a',
- 'b': '\b',
- 'f': '\f',
- 'n': '\n',
- 'r': '\r',
- 't': '\t',
- 'v': '\v',
- '\\': '\\',
- }.get(g, g) # Pass unknown escapes through without the backslash
-
- # Convert escape sequences
- format = re.sub(RE_UNESCAPE, replace, args[0])
- stdout.write(format % tuple(args[1:]))
- return 0
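-
-# Example (illustrative): printf with args ['%s\x41\n', 'hi'] writes
-# 'hiA' and a newline; the octal escape \101 expands to the same 'A'.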
-
-#-------------------------------------------------------------------------------
-# true utility
-#-------------------------------------------------------------------------------
-def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# sed utility
-#-------------------------------------------------------------------------------
-RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$')
-
-# cygwin sed fails with some expressions when they do not end with a single space.
-# See unit tests for details. Interestingly, the same expressions work perfectly
-# in the cygwin shell.
-def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Scan pattern arguments and append a space if necessary
- for i in xrange(len(args)):
- if not RE_SED.search(args[i]):
- continue
- args[i] = args[i] + ' '
-
- return run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags)
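-
-# Example: the expression 's/foo/bar/' matches RE_SED above and is
-# rewritten to 's/foo/bar/ ' (note the trailing space) before being
-# handed to the real sed.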
-
-#-------------------------------------------------------------------------------
-# sleep utility
-#-------------------------------------------------------------------------------
-def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- time.sleep(int(args[0]))
- return 0
-
-#-------------------------------------------------------------------------------
-# sort utility
-#-------------------------------------------------------------------------------
-OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files")
-
-def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags):
-
- def sort(path):
- if path == '-':
- lines = stdin.readlines()
- else:
- try:
- f = file(path)
- try:
- lines = f.readlines()
- finally:
- f.close()
- except IOError, e:
- stderr.write(str(e) + '\n')
- return 1
-
- if lines and lines[-1][-1]!='\n':
- lines[-1] = lines[-1] + '\n'
- return lines
-
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SORT.parse_args(args)
- alllines = []
-
- if len(args)<=0:
- args += ['-']
-
- # Load all files lines
- curdir = os.getcwd()
- try:
- os.chdir(env['PWD'])
- for path in args:
- alllines += sort(path)
- finally:
- os.chdir(curdir)
-
- alllines.sort()
- for line in alllines:
- stdout.write(line)
- return 0
-
-#-------------------------------------------------------------------------------
-# hg utility
-#-------------------------------------------------------------------------------
-
-hgcommands = [
- 'add',
- 'addremove',
- 'commit', 'ci',
- 'debugrename',
- 'debugwalk',
- 'falabala', # Dummy command used in a mercurial test
- 'incoming',
- 'locate',
- 'pull',
- 'push',
- 'qinit',
- 'remove', 'rm',
- 'rename', 'mv',
- 'revert',
- 'showconfig',
- 'status', 'st',
- 'strip',
- ]
-
-def rewriteslashes(name, args):
- # Several hg commands output file paths, rewrite the separators
- if len(args) > 1 and name.lower().endswith('python') \
- and args[0].endswith('hg'):
- for cmd in hgcommands:
- if cmd in args[1:]:
- return True
-
- # svn output contains many paths with OS specific separators.
- # Normalize these to unix paths.
- base = os.path.basename(name)
- if base.startswith('svn'):
- return True
-
- return False
-
-def rewritehg(output):
- if not output:
- return output
- # Rewrite os specific messages
- output = output.replace(': The system cannot find the file specified',
- ': No such file or directory')
- output = re.sub(': Access is denied.*$', ': Permission denied', output)
- output = output.replace(': No connection could be made because the target machine actively refused it',
- ': Connection refused')
- return output
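-
-# Example: rewritehg('abort: Access is denied') returns
-# 'abort: Permission denied', mapping the win32 message to the unix
-# wording the hg tests expect.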
-
-
-def run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags):
- # Execute the command
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- hgbin = interp.options().hgbinary
- ishg = hgbin and ('hg' in name or args and 'hg' in args[0])
- unixoutput = 'cygwin' in name or ishg
-
- exec_env = env.get_variables()
- try:
- # BUG: comparing file descriptors is clearly not a reliable way to tell
- # whether they point to the same underlying object. But within pysh's
- # limited scope this is usually right; we do not expect complicated
- # redirections besides the usual 2>&1.
- # One case we cannot deal with is when stdout and stderr are redirected
- # *by the pysh caller*. This is the reason for the --redirect pysh()
- # option.
- # We want to know whether they are the same because we sometimes need to
- # transform the command output, mostly removing CR-LF to ensure that the
- # output is unix-like. Cygwin utilities are a special case because they
- # explicitly set their output streams to binary mode, so we have nothing
- # to do. For all other commands, we have to guess whether they are
- # sending text data, in which case the transformation must be done.
- # Again, the NUL character test is unreliable but should be enough for
- # the hg tests.
- redirected = stdout.fileno()==stderr.fileno()
- if not redirected:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- else:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, err = p.communicate()
- except WindowsError, e:
- raise UtilityError(str(e))
-
- if not unixoutput:
- def encode(s):
- if '\0' in s:
- return s
- return s.replace('\r\n', '\n')
- else:
- encode = lambda s: s
-
- if rewriteslashes(name, args):
- encode1_ = encode
- def encode(s):
- s = encode1_(s)
- s = s.replace('\\\\', '\\')
- s = s.replace('\\', '/')
- return s
-
- if ishg:
- encode2_ = encode
- def encode(s):
- return rewritehg(encode2_(s))
-
- stdout.write(encode(out))
- if not redirected:
- stderr.write(encode(err))
- return p.returncode
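-
-# Note: the nested encode() definitions above compose, applying in order
-# CR-LF stripping, backslash-to-slash rewriting, then hg message
-# rewriting, to whatever the child process wrote.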
-
diff --git a/bitbake/lib/bb/pysh/interp.py b/bitbake/lib/bb/pysh/interp.py
deleted file mode 100644
index efe5181e1e..0000000000
--- a/bitbake/lib/bb/pysh/interp.py
+++ /dev/null
@@ -1,1367 +0,0 @@
-# interp.py - shell interpreter for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Implement the shell interpreter.
-
-Most references are made to "The Open Group Base Specifications Issue 6".
-<http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html>
-"""
-# TODO: document the fact that input streams must implement fileno() so Popen
-# works correctly; it requires non-stdin streams to be implemented as files. Still to be tested...
-# DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here.
-# TODO: stop command execution upon error.
-# TODO: sort out the filename/io_number mess. It should be possible to use filenames only.
-# TODO: review subshell implementation
-# TODO: test environment cloning for non-special builtins
-# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost
-# TODO: unit test for variable assignment
-# TODO: test error management wrt error type/utility type
-# TODO: test for binary output everywhere
-# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary.
-import base64
-import cPickle as pickle
-import errno
-import glob
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-try:
- s = set()
- del s
-except NameError:
- from sets import Set as set
-
-import builtin
-from sherrors import *
-import pyshlex
-import pyshyacc
-
-def mappend(func, *args, **kargs):
- """Like map but assume func returns a list. Returned lists are merged into
- a single one.
- """
- return reduce(lambda a,b: a+b, map(func, *args, **kargs), [])
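-
-# Example: mappend(lambda x: [x, x], [1, 2]) returns [1, 1, 2, 2].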
-
-class FileWrapper:
- """File object wrapper to ease debugging.
-
- Allow mode checking and implement file duplication through a simple
- reference counting scheme. Not sure the latter is really useful since
- only real file descriptors can be used.
- """
- def __init__(self, mode, file, close=True):
- if mode not in ('r', 'w', 'a'):
- raise IOError('invalid mode: %s' % mode)
- self._mode = mode
- self._close = close
- if isinstance(file, FileWrapper):
- if file._refcount[0] <= 0:
- raise IOError(0, 'Error')
- self._refcount = file._refcount
- self._refcount[0] += 1
- self._file = file._file
- else:
- self._refcount = [1]
- self._file = file
-
- def dup(self):
- return FileWrapper(self._mode, self, self._close)
-
- def fileno(self):
- """fileno() should be only necessary for input streams."""
- return self._file.fileno()
-
- def read(self, size=-1):
- if self._mode!='r':
- raise IOError(0, 'Error')
- return self._file.read(size)
-
- def readlines(self, *args, **kwargs):
- return self._file.readlines(*args, **kwargs)
-
- def write(self, s):
- if self._mode not in ('w', 'a'):
- raise IOError(0, 'Error')
- return self._file.write(s)
-
- def flush(self):
- self._file.flush()
-
- def close(self):
- if not self._refcount:
- return
- assert self._refcount[0] > 0
-
- self._refcount[0] -= 1
- if self._refcount[0] == 0:
- self._mode = 'c'
- if self._close:
- self._file.close()
- self._refcount = None
-
- def mode(self):
- return self._mode
-
- def __getattr__(self, name):
- if name == 'name':
- self.name = getattr(self._file, name)
- return self.name
- else:
- raise AttributeError(name)
-
- def __del__(self):
- self.close()
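-
-# Example (illustrative): dup() shares the underlying file through the
-# reference count, so the file is really closed only when the last
-# wrapper is closed:
-#   w = FileWrapper('w', open('out.log', 'wb'))
-#   d = w.dup(); d.close()   # underlying file still open
-#   w.close()                # refcount reaches 0, file closed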
-
-
-def win32_open_devnull(mode):
- return open('NUL', mode)
-
-
-class Redirections:
- """Stores open files and their mapping to pseudo-sh file descriptor.
- """
- # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does
- # not make 1 to redirect to 4
- def __init__(self, stdin=None, stdout=None, stderr=None):
- self._descriptors = {}
- if stdin is not None:
- self._add_descriptor(0, stdin)
- if stdout is not None:
- self._add_descriptor(1, stdout)
- if stderr is not None:
- self._add_descriptor(2, stderr)
-
- def add_here_document(self, interp, name, content, io_number=None):
- if io_number is None:
- io_number = 0
-
- if name==pyshlex.unquote_wordtree(name):
- content = interp.expand_here_document(('TOKEN', content))
-
- # Write document content in a temporary file
- tmp = tempfile.TemporaryFile()
- try:
- tmp.write(content)
- tmp.flush()
- tmp.seek(0)
- self._add_descriptor(io_number, FileWrapper('r', tmp))
- except:
- tmp.close()
- raise
-
- def add(self, interp, op, filename, io_number=None):
- if op not in ('<', '>', '>|', '>>', '>&'):
- # TODO: add descriptor duplication and here_documents
- raise RedirectionError('Unsupported redirection operator "%s"' % op)
-
- if io_number is not None:
- io_number = int(io_number)
-
- if (op == '>&' and filename.isdigit()) or filename=='-':
- # No expansion for file descriptors, quote them if you want a filename
- fullname = filename
- else:
- if filename.startswith('/'):
- # TODO: win32 kludge
- if filename=='/dev/null':
- fullname = 'NUL'
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError()
- else:
- fullname = interp.expand_redirection(('TOKEN', filename))
- if not fullname:
- raise RedirectionError('%s: ambiguous redirect' % filename)
- # Build absolute path based on PWD
- fullname = os.path.join(interp.get_env()['PWD'], fullname)
-
- if op=='<':
- return self._add_input_redirection(interp, fullname, io_number)
- elif op in ('>', '>|'):
- clobber = ('>|'==op)
- return self._add_output_redirection(interp, fullname, io_number, clobber)
- elif op=='>>':
- return self._add_output_appending(interp, fullname, io_number)
- elif op=='>&':
- return self._dup_output_descriptor(fullname, io_number)
-
- def close(self):
- if self._descriptors is not None:
- for desc in self._descriptors.itervalues():
- desc.flush()
- desc.close()
- self._descriptors = None
-
- def stdin(self):
- return self._descriptors[0]
-
- def stdout(self):
- return self._descriptors[1]
-
- def stderr(self):
- return self._descriptors[2]
-
- def clone(self):
- clone = Redirections()
- for desc, fileobj in self._descriptors.iteritems():
- clone._descriptors[desc] = fileobj.dup()
- return clone
-
- def _add_output_redirection(self, interp, filename, io_number, clobber):
- if io_number is None:
- # io_number default to standard output
- io_number = 1
-
- if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename):
- # File already exists in no-clobber mode, bail out
- raise RedirectionError('File "%s" already exists' % filename)
-
- # Open and register
- self._add_file_descriptor(io_number, filename, 'w')
-
- def _add_output_appending(self, interp, filename, io_number):
- if io_number is None:
- io_number = 1
- self._add_file_descriptor(io_number, filename, 'a')
-
- def _add_input_redirection(self, interp, filename, io_number):
- if io_number is None:
- io_number = 0
- self._add_file_descriptor(io_number, filename, 'r')
-
- def _add_file_descriptor(self, io_number, filename, mode):
- try:
- if filename.startswith('/'):
- if filename=='/dev/null':
- f = win32_open_devnull(mode+'b')
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError('cannot open absolute path %s' % repr(filename))
- else:
- f = file(filename, mode+'b')
- except IOError, e:
- raise RedirectionError(str(e))
-
- wrapper = None
- try:
- wrapper = FileWrapper(mode, f)
- f = None
- self._add_descriptor(io_number, wrapper)
- except:
- if f: f.close()
- if wrapper: wrapper.close()
- raise
-
- def _dup_output_descriptor(self, source_fd, dest_fd):
- if source_fd is None:
- source_fd = 1
- self._dup_file_descriptor(source_fd, dest_fd, 'w')
-
- def _dup_file_descriptor(self, source_fd, dest_fd, mode):
- source_fd = int(source_fd)
- if source_fd not in self._descriptors:
- raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd))
- source = self._descriptors[source_fd]
-
- if source.mode()!=mode:
- raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode))
-
- if dest_fd=='-':
- # Close the source descriptor
- del self._descriptors[source_fd]
- source.close()
- else:
- dest_fd = int(dest_fd)
- if dest_fd not in self._descriptors:
- raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd))
-
- dest = self._descriptors[dest_fd]
- if dest.mode()!=mode:
- raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode))
-
- self._descriptors[dest_fd] = source.dup()
- dest.close()
-
- def _add_descriptor(self, io_number, file):
- io_number = int(io_number)
-
- if io_number in self._descriptors:
- # Close the current descriptor
- d = self._descriptors[io_number]
- del self._descriptors[io_number]
- d.close()
-
- self._descriptors[io_number] = file
-
- def __str__(self):
- names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v
- in self._descriptors.iteritems()]
- names = ','.join(names)
- return 'Redirections(%s)' % names
-
- def __del__(self):
- self.close()
-
-def cygwin_to_windows_path(path):
- """Turn /cygdrive/c/foo into c:/foo, or return path if it
- is not a cygwin path.
- """
- if not path.startswith('/cygdrive/'):
- return path
- path = path[len('/cygdrive/'):]
- path = path[:1] + ':' + path[1:]
- return path
-
-def win32_to_unix_path(path):
- if path is not None:
- path = path.replace('\\', '/')
- return path
-
-_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
-_SHEBANG_CMDS = {
- '/usr/bin/env': 'env',
- '/bin/sh': 'pysh',
- 'python': 'python',
-}
-
-def resolve_shebang(path, ignoreshell=False):
- """Return a list of arguments as shebang interpreter call or an empty list
- if path does not refer to an executable script.
- See <http://www.opengroup.org/austin/docs/austin_51r2.txt>.
-
- ignoreshell - set to True to ignore sh shebangs. Return an empty list instead.
- """
- try:
- f = file(path)
- try:
- # At most 80 characters in the first line
- header = f.read(80).splitlines()[0]
- finally:
- f.close()
-
- m = _RE_SHEBANG.search(header)
- if not m:
- return []
- cmd, arg = m.group(1,2)
- if os.path.isfile(cmd):
- # Keep this one, the hg script for instance contains a weird windows
- # shebang referencing the current python install.
- cmdfile = os.path.basename(cmd).lower()
- if cmdfile == 'python.exe':
- cmd = 'python'
- elif cmd not in _SHEBANG_CMDS:
- raise CommandNotFound('Unknown interpreter "%s" referenced in '\
- 'shebang' % header)
- cmd = _SHEBANG_CMDS.get(cmd)
- if cmd is None or (ignoreshell and cmd == 'pysh'):
- return []
- if arg is None:
- return [cmd, win32_to_unix_path(path)]
- return [cmd, arg, win32_to_unix_path(path)]
- except IOError, e:
- if e.errno != errno.ENOENT and \
- not (e.errno == errno.EPERM and os.path.isdir(path)): # Opening a directory raises EPERM
- raise
- return []
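-
-# Example: a script starting with '#!/bin/sh' resolves to
-# ['pysh', <unix path of the script>], while '#!/usr/bin/env python'
-# gives ['env', 'python', <unix path of the script>] (illustrative).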
-
-def win32_find_in_path(name, path):
- if isinstance(path, str):
- path = path.split(os.pathsep)
-
- exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep)
- for p in path:
- p_name = os.path.join(p, name)
-
- prefix = resolve_shebang(p_name)
- if prefix:
- return prefix
-
- for ext in exts:
- p_name_ext = p_name + ext
- if os.path.exists(p_name_ext):
- return [win32_to_unix_path(p_name_ext)]
- return []
-
-class Traps(dict):
- def __setitem__(self, key, value):
- if key not in ('EXIT',):
- raise NotImplementedError()
- super(Traps, self).__setitem__(key, value)
-
-# IFS white spaces character class
-_IFS_WHITESPACES = (' ', '\t', '\n')
-
-class Environment:
- """Environment holds environment variables, export table, function
- definitions and whatever is defined in 2.12 "Shell Execution Environment",
- redirection excepted.
- """
- def __init__(self, pwd):
- self._opt = set() #Shell options
-
- self._functions = {}
- self._env = {'?': '0', '#': '0'}
- self._exported = set([
- 'HOME', 'IFS', 'PATH'
- ])
-
- # Set environment vars with side-effects
- self._ifs_ws = None # Set of IFS whitespace characters
- self._ifs_re = None # Regular expression used to split between words using IFS classes
- self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values
- self['PWD'] = pwd
- self.traps = Traps()
-
- def clone(self, subshell=False):
- env = Environment(self['PWD'])
- env._opt = set(self._opt)
- for k,v in self.get_variables().iteritems():
- if k in self._exported:
- env.export(k,v)
- elif subshell:
- env[k] = v
-
- if subshell:
- env._functions = dict(self._functions)
-
- return env
-
- def __getitem__(self, key):
- if key in ('@', '*', '-', '$'):
- raise NotImplementedError('%s is not implemented' % repr(key))
- return self._env[key]
-
- def get(self, key, defval=None):
- try:
- return self[key]
- except KeyError:
- return defval
-
- def __setitem__(self, key, value):
- if key=='IFS':
- # Update the whitespace/non-whitespace classes
- self._update_ifs(value)
- elif key=='PWD':
- pwd = os.path.abspath(value)
- if not os.path.isdir(pwd):
- raise VarAssignmentError('Invalid directory %s' % value)
- value = pwd
- elif key in ('?', '!'):
- value = str(int(value))
- self._env[key] = value
-
- def __delitem__(self, key):
- if key in ('IFS', 'PWD', '?'):
- raise VarAssignmentError('%s cannot be unset' % key)
- del self._env[key]
-
- def __contains__(self, item):
- return item in self._env
-
- def set_positional_args(self, args):
- """Set the content of 'args' as positional argument from 1 to len(args).
- Return previous argument as a list of strings.
- """
- # Save and remove previous arguments
- prevargs = []
- for i in xrange(int(self._env['#'])):
- i = str(i+1)
- prevargs.append(self._env[i])
- del self._env[i]
- self._env['#'] = '0'
-
- #Set new ones
- for i,arg in enumerate(args):
- self._env[str(i+1)] = str(arg)
- self._env['#'] = str(len(args))
-
- return prevargs
-
- def get_positional_args(self):
- return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]
-
- def get_variables(self):
- return dict(self._env)
-
- def export(self, key, value=None):
- if value is not None:
- self[key] = value
- self._exported.add(key)
-
- def get_exported(self):
- return [(k,self._env.get(k)) for k in self._exported]
-
- def split_fields(self, word):
- if not self._ifs_ws or not word:
- return [word]
- return re.split(self._ifs_re, word)
-
- def _update_ifs(self, value):
- """Update the split_fields related variables when IFS character set is
- changed.
- """
- # TODO: handle NULL IFS
-
- # Separate characters in whitespace and non-whitespace
- chars = set(value)
- ws = [c for c in chars if c in _IFS_WHITESPACES]
- nws = [c for c in chars if c not in _IFS_WHITESPACES]
-
- # Keep whitespaces in a string for left and right stripping
- self._ifs_ws = ''.join(ws)
-
- # Build a regexp to split fields
- trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']'
- if nws:
- # A single non-whitespace separator with optional surrounding
- # whitespace, or a run of whitespace
- nws = '[' + ''.join([re.escape(c) for c in nws]) + ']'
- nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)'
- else:
- # Whitespace separators only: split on runs of IFS whitespace
- nws = trailing + '+'
- self._ifs_re = re.compile(nws)
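-
-# Example: with the default IFS, split_fields('a  b\tc') returns
-# ['a', 'b', 'c']; runs of IFS whitespace count as a single separator.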
-
- def has_opt(self, opt, val=None):
- return (opt, val) in self._opt
-
- def set_opt(self, opt, val=None):
- self._opt.add((opt, val))
-
- def find_in_path(self, name, pwd=False):
- path = self._env.get('PATH', '').split(os.pathsep)
- if pwd:
- path[:0] = [self['PWD']]
- if os.name == 'nt':
- return win32_find_in_path(name, path)
- else:
- raise NotImplementedError()
-
- def define_function(self, name, body):
- if not is_name(name):
- raise ShellSyntaxError('%s is not a valid function name' % repr(name))
- self._functions[name] = body
-
- def remove_function(self, name):
- del self._functions[name]
-
- def is_function(self, name):
- return name in self._functions
-
- def get_function(self, name):
- return self._functions.get(name)
-
-
-name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
-name_charset = dict(zip(name_charset,name_charset))
-
-def match_name(s):
- """Return the length in characters of the longest prefix made of name
- allowed characters in s.
- """
- for i,c in enumerate(s):
- if c not in name_charset:
- return s[:i]
- return s
-
-def is_name(s):
- return len([c for c in s if c not in name_charset])<=0
-
-def is_special_param(c):
- return len(c)==1 and c in ('@','*','#','?','-','$','!','0')
-
-def utility_not_implemented(name, *args, **kwargs):
- raise NotImplementedError('%s utility is not implemented' % name)
-
-
-class Utility:
- """Define utilities properties:
- func -- utility callable. See builtin module for utility samples.
- is_special -- see XCU 2.8.
- """
- def __init__(self, func, is_special=0):
- self.func = func
- self.is_special = bool(is_special)
-
-
-def encodeargs(args):
- def encodearg(s):
- # base64.encodestring inserts a newline every 76 characters; strip
- # them so the encoded form fits on a single command line
- return base64.encodestring(s).replace('\n', '')
-
- s = pickle.dumps(args)
- return encodearg(s)
-
-def decodeargs(s):
- s = base64.decodestring(s)
- return pickle.loads(s)
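-
-# Example round-trip (illustrative): decodeargs(encodeargs(args)) returns
-# the original args. The base64 layer keeps the pickled data on a single
-# line so _asynclist() below can pass it on the pysh.bat command line.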
-
-
-class GlobError(Exception):
- pass
-
-class Options:
- def __init__(self):
- # True if Mercurial operates with binary streams
- self.hgbinary = True
-
-class Interpreter:
- # Implementation is very basic: the execute() method just makes a DFS on the
- # AST and execute nodes one by one. Nodes are tuple (name,obj) where name
- # is a string identifier and obj the AST element returned by the parser.
- #
- # Handlers are named after the node identifiers.
- # TODO: check node names and remove the switch in execute with some
- # dynamic getattr() call to find node handlers.
- """Shell interpreter.
-
- The following debugging flags can be passed:
- debug-parsing - enable PLY debugging.
- debug-tree - print the generated AST.
- debug-cmd - trace command execution before word expansion, plus exit status.
- debug-utility - trace utility execution.
- """
-
- # List supported commands.
- COMMANDS = {
- 'cat': Utility(builtin.utility_cat,),
- 'cd': Utility(builtin.utility_cd,),
- ':': Utility(builtin.utility_colon,),
- 'echo': Utility(builtin.utility_echo),
- 'env': Utility(builtin.utility_env),
- 'exit': Utility(builtin.utility_exit),
- 'export': Utility(builtin.builtin_export, is_special=1),
- 'egrep': Utility(builtin.utility_egrep),
- 'fgrep': Utility(builtin.utility_fgrep),
- 'gunzip': Utility(builtin.utility_gunzip),
- 'kill': Utility(builtin.utility_kill),
- 'mkdir': Utility(builtin.utility_mkdir),
- 'netstat': Utility(builtin.utility_netstat),
- 'printf': Utility(builtin.utility_printf),
- 'pwd': Utility(builtin.utility_pwd),
- 'return': Utility(builtin.builtin_return, is_special=1),
- 'sed': Utility(builtin.utility_sed,),
- 'set': Utility(builtin.builtin_set,),
- 'shift': Utility(builtin.builtin_shift,),
- 'sleep': Utility(builtin.utility_sleep,),
- 'sort': Utility(builtin.utility_sort,),
- 'trap': Utility(builtin.builtin_trap, is_special=1),
- 'true': Utility(builtin.utility_true),
- 'unset': Utility(builtin.builtin_unset, is_special=1),
- 'wait': Utility(builtin.builtin_wait, is_special=1),
- }
-
- def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None,
- stdout=None, stderr=None, opts=None):
- self._env = env
- if self._env is None:
- self._env = Environment(pwd)
- self._children = {}
-
- self._redirs = redirs
- self._close_redirs = False
-
- if self._redirs is None:
- if stdin is None:
- stdin = sys.stdin
- if stdout is None:
- stdout = sys.stdout
- if stderr is None:
- stderr = sys.stderr
- stdin = FileWrapper('r', stdin, False)
- stdout = FileWrapper('w', stdout, False)
- stderr = FileWrapper('w', stderr, False)
- self._redirs = Redirections(stdin, stdout, stderr)
- self._close_redirs = True
-
- self._debugflags = list(debugflags)
- self._logfile = sys.stderr
- if opts is None:
- opts = Options()
- self._options = opts
-
- def close(self):
- """Must be called when the interpreter is no longer used."""
- script = self._env.traps.get('EXIT')
- if script:
- try:
- self.execute_script(script=script)
- except:
- pass
-
- if self._redirs is not None and self._close_redirs:
- self._redirs.close()
- self._redirs = None
-
- def log(self, s):
- self._logfile.write(s)
- self._logfile.flush()
-
- def __getitem__(self, key):
- return self._env[key]
-
- def __setitem__(self, key, value):
- self._env[key] = value
-
- def options(self):
- return self._options
-
- def redirect(self, redirs, ios):
- def add_redir(io):
- if isinstance(io, pyshyacc.IORedirect):
- redirs.add(self, io.op, io.filename, io.io_number)
- else:
- redirs.add_here_document(self, io.name, io.content, io.io_number)
-
- map(add_redir, ios)
- return redirs
-
- def execute_script(self, script=None, ast=None, sourced=False,
- scriptpath=None):
- """If script is not None, parse the input. Otherwise takes the supplied
- AST. Then execute the AST.
- Return the script exit status.
- """
- try:
- if scriptpath is not None:
- self._env['0'] = os.path.abspath(scriptpath)
-
- if script is not None:
- debug_parsing = ('debug-parsing' in self._debugflags)
- cmds, script = pyshyacc.parse(script, True, debug_parsing)
- if 'debug-tree' in self._debugflags:
- pyshyacc.print_commands(cmds, self._logfile)
- self._logfile.flush()
- else:
- cmds, script = ast, ''
-
- status = 0
- for cmd in cmds:
- try:
- status = self.execute(cmd)
- except ExitSignal, e:
- if sourced:
- raise
- status = int(e.args[0])
- return status
- except ShellError:
- self._env['?'] = 1
- raise
- if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
- self.log('returncode ' + str(status)+ '\n')
- return status
- except CommandNotFound, e:
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
- except RedirectionError, e:
- # TODO: should be handled depending on the utility status
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
-
- def dotcommand(self, env, args):
- if len(args) < 1:
- raise ShellError('. expects at least one argument')
- path = args[0]
- if '/' not in path:
- found = env.find_in_path(args[0], True)
- if found:
- path = found[0]
- script = file(path).read()
- return self.execute_script(script=script, sourced=True)
-
- def execute(self, token, redirs=None):
- """Execute and AST subtree with supplied redirections overriding default
- interpreter ones.
- Return the exit status.
- """
- if not token:
- return 0
-
- if redirs is None:
- redirs = self._redirs
-
- if isinstance(token, list):
- # Commands sequence
- res = 0
- for t in token:
- res = self.execute(t, redirs)
- return res
-
- type, value = token
- status = 0
- if type=='simple_command':
- redirs_copy = redirs.clone()
- try:
- # TODO: define and handle command return values
- # TODO: implement set -e
- status = self._execute_simple_command(value, redirs_copy)
- finally:
- redirs_copy.close()
- elif type=='pipeline':
- status = self._execute_pipeline(value, redirs)
- elif type=='and_or':
- status = self._execute_and_or(value, redirs)
- elif type=='for_clause':
- status = self._execute_for_clause(value, redirs)
- elif type=='while_clause':
- status = self._execute_while_clause(value, redirs)
- elif type=='function_definition':
- status = self._execute_function_definition(value, redirs)
- elif type=='brace_group':
- status = self._execute_brace_group(value, redirs)
- elif type=='if_clause':
- status = self._execute_if_clause(value, redirs)
- elif type=='subshell':
- status = self.subshell(ast=value.cmds, redirs=redirs)
- elif type=='async':
- status = self._asynclist(value)
- elif type=='redirect_list':
- redirs_copy = self.redirect(redirs.clone(), value.redirs)
- try:
- status = self.execute(value.cmd, redirs_copy)
- finally:
- redirs_copy.close()
- else:
- raise NotImplementedError('Unsupported token type ' + type)
-
- if status < 0:
- status = 255
- return status
-
- def _execute_if_clause(self, if_clause, redirs):
- cond_status = self.execute(if_clause.cond, redirs)
- if cond_status==0:
- return self.execute(if_clause.if_cmds, redirs)
- else:
- return self.execute(if_clause.else_cmds, redirs)
-
- def _execute_brace_group(self, group, redirs):
- status = 0
- for cmd in group.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_function_definition(self, fundef, redirs):
- self._env.define_function(fundef.name, fundef.body)
- return 0
-
- def _execute_while_clause(self, while_clause, redirs):
- status = 0
- while 1:
- cond_status = 0
- for cond in while_clause.condition:
- cond_status = self.execute(cond, redirs)
-
- if cond_status:
- break
-
- for cmd in while_clause.cmds:
- status = self.execute(cmd, redirs)
-
- return status
-
- def _execute_for_clause(self, for_clause, redirs):
- if not is_name(for_clause.name):
- raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name))
- items = mappend(self.expand_token, for_clause.items)
-
- status = 0
- for item in items:
- self._env[for_clause.name] = item
- for cmd in for_clause.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_and_or(self, or_and, redirs):
- res = self.execute(or_and.left, redirs)
- if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0):
- res = self.execute(or_and.right, redirs)
- return res
-
- def _execute_pipeline(self, pipeline, redirs):
- if len(pipeline.commands)==1:
- status = self.execute(pipeline.commands[0], redirs)
- else:
- # Execute all commands one after the other
- status = 0
- inpath, outpath = None, None
- try:
- # Command inputs and outputs cannot really be plugged together as
- # done by a real shell. Run commands sequentially and chain their
- # input/output through temporary files.
- tmpfd, inpath = tempfile.mkstemp()
- os.close(tmpfd)
- tmpfd, outpath = tempfile.mkstemp()
- os.close(tmpfd)
-
- inpath = win32_to_unix_path(inpath)
- outpath = win32_to_unix_path(outpath)
-
- for i, cmd in enumerate(pipeline.commands):
- call_redirs = redirs.clone()
- try:
- if i!=0:
- call_redirs.add(self, '<', inpath)
- if i!=len(pipeline.commands)-1:
- call_redirs.add(self, '>', outpath)
-
- status = self.execute(cmd, call_redirs)
-
- # Chain inputs/outputs
- inpath, outpath = outpath, inpath
- finally:
- call_redirs.close()
- finally:
- if inpath: os.remove(inpath)
- if outpath: os.remove(outpath)
-
- if pipeline.reverse_status:
- status = int(not status)
- self._env['?'] = status
- return status
-
- def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
- assert interp is self
-
- func = env.get_function(name)
- #Set positional parameters
- prevargs = None
- try:
- prevargs = env.set_positional_args(args)
- try:
- redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
- try:
- status = self.execute(func, redirs)
- finally:
- redirs.close()
- except ReturnSignal, e:
- status = int(e.args[0])
- env['?'] = status
- return status
- finally:
- #Reset positional parameters
- if prevargs is not None:
- env.set_positional_args(prevargs)
-
- def _execute_simple_command(self, token, redirs):
- """Can raise ReturnSignal when return builtin is called, ExitSignal when
- exit is called, and other shell exceptions upon builtin failures.
- """
- debug_command = 'debug-cmd' in self._debugflags
- if debug_command:
- self.log('word' + repr(token.words) + '\n')
- self.log('assigns' + repr(token.assigns) + '\n')
- self.log('redirs' + repr(token.redirs) + '\n')
-
- is_special = None
- env = self._env
-
- try:
- # Word expansion
- args = []
- for word in token.words:
- args += self.expand_token(word)
- if is_special is None and args:
- is_special = env.is_function(args[0]) or \
- (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
-
- if debug_command:
- self.log('_execute_simple_command' + str(args) + '\n')
-
- if not args:
- # Redirections happen in a subshell
- redirs = redirs.clone()
- elif not is_special:
- env = self._env.clone()
-
- # Redirections
- self.redirect(redirs, token.redirs)
-
- # Variables assignments
- res = 0
- for type,(k,v) in token.assigns:
- status, expanded = self.expand_variable((k,v))
- if status is not None:
- res = status
- if args:
- env.export(k, expanded)
- else:
- env[k] = expanded
-
- if args and args[0] in ('.', 'source'):
- res = self.dotcommand(env, args[1:])
- elif args:
- if args[0] in self.COMMANDS:
- command = self.COMMANDS[args[0]]
- elif env.is_function(args[0]):
- command = Utility(self._execute_function, is_special=True)
- else:
- if not '/' in args[0].replace('\\', '/'):
- cmd = env.find_in_path(args[0])
- if not cmd:
- # TODO: test error code on unknown command => 127
- raise CommandNotFound('Unknown command: "%s"' % args[0])
- else:
- # Handle commands like '/cygdrive/c/foo.bat'
- cmd = cygwin_to_windows_path(args[0])
- if not os.path.exists(cmd):
- raise CommandNotFound('%s: No such file or directory' % args[0])
- shebang = resolve_shebang(cmd)
- if shebang:
- cmd = shebang
- else:
- cmd = [cmd]
- args[0:1] = cmd
- command = Utility(builtin.run_command)
-
- # Command execution
- if 'debug-cmd' in self._debugflags:
- self.log('redirections ' + str(redirs) + '\n')
-
- res = command.func(args[0], args[1:], self, env,
- redirs.stdin(), redirs.stdout(),
- redirs.stderr(), self._debugflags)
-
- if self._env.has_opt('-x'):
- # Trace command execution in shell environment
- # BUG: would be hard to reproduce a real shell behaviour since
- # the AST is not annotated with source lines/tokens.
- self._redirs.stdout().write(' '.join(args))
-
- except ReturnSignal:
- raise
- except ShellError, e:
- if is_special or isinstance(e, (ExitSignal,
- ShellSyntaxError, ExpansionError)):
- raise e
- self._redirs.stderr().write(str(e)+'\n')
- return 1
-
- return res
-
- def expand_token(self, word):
- """Expand a word as specified in [2.6 Word Expansions]. Return the list
- of expanded words.
- """
- status, wtrees = self._expand_word(word)
- return map(pyshlex.wordtree_as_string, wtrees)
-
- def expand_variable(self, word):
- """Return a status code (or None if no command expansion occurred)
- and a single word.
- """
- status, wtrees = self._expand_word(word, pathname=False, split=False)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return status, words[0]
-
- def expand_here_document(self, word):
- """Return the expanded document as a single word. The here document is
- assumed to be unquoted.
- """
- status, wtrees = self._expand_word(word, pathname=False,
- split=False, here_document=True)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return words[0]
-
- def expand_redirection(self, word):
- """Return a single word."""
- return self.expand_variable(word)[1]
-
- def get_env(self):
- return self._env
-
- def _expand_word(self, token, pathname=True, split=True, here_document=False):
- wtree = pyshlex.make_wordtree(token[1], here_document=here_document)
-
- # TODO: implement tilde expansion
- def expand(wtree):
- """Return a pseudo wordtree: the tree or its subelements can be empty
- lists when no value results from the expansion.
- """
- status = None
- for part in wtree:
- if not isinstance(part, list):
- continue
- if part[0] in ("'", '\\'):
- continue
- elif part[0] in ('`', '$('):
- status, result = self._expand_command(part)
- part[:] = result
- elif part[0] in ('$', '${'):
- part[:] = self._expand_parameter(part, wtree[0]=='"', split)
- elif part[0] in ('', '"'):
- status, result = expand(part)
- part[:] = result
- else:
- raise NotImplementedError('%s expansion is not implemented'
- % part[0])
- # [] is returned when an expansion results in no field,
- # like an empty $@
- wtree = [p for p in wtree if p != []]
- if len(wtree) < 3:
- return status, []
- return status, wtree
-
- status, wtree = expand(wtree)
- if len(wtree) == 0:
- return status, wtree
- wtree = pyshlex.normalize_wordtree(wtree)
-
- if split:
- wtrees = self._split_fields(wtree)
- else:
- wtrees = [wtree]
-
- if pathname:
- wtrees = mappend(self._expand_pathname, wtrees)
-
- wtrees = map(self._remove_quotes, wtrees)
- return status, wtrees
-
- def _expand_command(self, wtree):
- # BUG: there is something to do with backslashes and quoted
- # characters here
- command = pyshlex.wordtree_as_string(wtree[1:-1])
- status, output = self.subshell_output(command)
- return status, ['', output, '']
-
- def _expand_parameter(self, wtree, quoted=False, split=False):
- """Return a valid wtree or an empty list when no parameter results."""
- # Get the parameter name
- # TODO: implement weird expansion rules with ':'
- name = pyshlex.wordtree_as_string(wtree[1:-1])
- if not is_name(name) and not is_special_param(name):
- raise ExpansionError('Bad substitution "%s"' % name)
- # TODO: implement special parameters
- if name in ('@', '*'):
- args = self._env.get_positional_args()
- if len(args) == 0:
- return []
- if len(args)<2:
- return ['', ''.join(args), '']
-
- sep = self._env.get('IFS', '')[:1]
- if split and quoted and name=='@':
- # Introduce a new token to tell the caller that these parameters
- # cause a split as specified in 2.5.2
- return ['@'] + args + ['']
- else:
- return ['', sep.join(args), '']
-
- return ['', self._env.get(name, ''), '']
-
- def _split_fields(self, wtree):
- def is_empty(split):
- return split==['', '', '']
-
- def split_positional(quoted):
- # Return a list of wtrees split according to positional parameter rules.
- # All remaining '@' groups are removed.
- assert quoted[0]=='"'
-
- splits = [[]]
- for part in quoted:
- if not isinstance(part, list) or part[0]!='@':
- splits[-1].append(part)
- else:
- # Empty or single-argument lists were dealt with already
- assert len(part)>3
- # First argument must join with the beginning part of the original word
- splits[-1].append(part[1])
- # Create double-quotes expressions for every argument after the first
- for arg in part[2:-1]:
- splits[-1].append('"')
- splits.append(['"', arg])
- return splits
-
- # At this point, all expansions but pathnames have occurred. Only quoted
- # and positional sequences remain. Thus, all candidates for field splitting
- # are in the tree root, or are positional splits ('@') and lie in root
- # children.
- if not wtree or wtree[0] not in ('', '"'):
- # The whole token is quoted or empty, nothing to split
- return [wtree]
-
- if wtree[0]=='"':
- wtree = ['', wtree, '']
-
- result = [['', '']]
- for part in wtree[1:-1]:
- if isinstance(part, list):
- if part[0]=='"':
- splits = split_positional(part)
- if len(splits)<=1:
- result[-1] += [part, '']
- else:
- # Terminate the current split
- result[-1] += [splits[0], '']
- result += splits[1:-1]
- # Create a new split
- result += [['', splits[-1], '']]
- else:
- result[-1] += [part, '']
- else:
- splits = self._env.split_fields(part)
- if len(splits)<=1:
- # No split
- result[-1][-1] += part
- else:
- # Terminate the current resulting part and create a new one
- result[-1][-1] += splits[0]
- result[-1].append('')
- result += [['', r, ''] for r in splits[1:-1]]
- result += [['', splits[-1]]]
- result[-1].append('')
-
- # Leading and trailing empty groups come from leading/trailing blanks
- if result and is_empty(result[-1]):
- result[-1:] = []
- if result and is_empty(result[0]):
- result[:1] = []
- return result
-
- def _expand_pathname(self, wtree):
- """See [2.6.6 Pathname Expansion]."""
- if self._env.has_opt('-f'):
- return [wtree]
-
- # All expansions have been performed, only quoted sequences should remain
- # in the tree. Generate the pattern by folding the tree, escaping special
- # characters when they appear quoted
- special_chars = '*?[]'
-
- def make_pattern(wtree):
- subpattern = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = make_pattern(part)
- elif wtree[0]!='':
- for c in part:
- # Meta-characters cannot be quoted
- if c in special_chars:
- raise GlobError()
- subpattern.append(part)
- return ''.join(subpattern)
-
- def pwd_glob(pattern):
- cwd = os.getcwd()
- os.chdir(self._env['PWD'])
- try:
- return glob.glob(pattern)
- finally:
- os.chdir(cwd)
-
- #TODO: check working directory issues here wrt relative patterns
- try:
- pattern = make_pattern(wtree)
- paths = pwd_glob(pattern)
- except GlobError:
- # BUG: Meta-characters were found in quoted sequences. They should
- # have been used literally but this is unsupported in current glob module.
- # Instead we consider the whole tree must be used literally and
- # therefore there is no point in globbing. This is wrong when meta
- # characters are mixed with quoted meta in the same pattern like:
- # < foo*"py*" >
- paths = []
-
- if not paths:
- return [wtree]
- return [['', path, ''] for path in paths]
-
- def _remove_quotes(self, wtree):
- """See [2.6.7 Quote Removal]."""
-
- def unquote(wtree):
- unquoted = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = unquote(part)
- unquoted.append(part)
- return ''.join(unquoted)
-
- return ['', unquote(wtree), '']
-
- def subshell(self, script=None, ast=None, redirs=None):
- """Execute the script or AST in a subshell, with inherited redirections
- if redirs is not None.
- """
- if redirs:
- sub_redirs = redirs
- else:
- sub_redirs = redirs.clone()
-
- subshell = None
- try:
- subshell = Interpreter(None, self._debugflags, self._env.clone(True),
- sub_redirs, opts=self._options)
- return subshell.execute_script(script, ast)
- finally:
- if not redirs: sub_redirs.close()
- if subshell: subshell.close()
-
- def subshell_output(self, script):
- """Execute the script in a subshell and return the captured output."""
- # Create temporary file to capture subshell output
- tmpfd, tmppath = tempfile.mkstemp()
- try:
- tmpfile = os.fdopen(tmpfd, 'wb')
- stdout = FileWrapper('w', tmpfile)
-
- redirs = Redirections(self._redirs.stdin().dup(),
- stdout,
- self._redirs.stderr().dup())
- try:
- status = self.subshell(script=script, redirs=redirs)
- finally:
- redirs.close()
- redirs = None
-
- # Extract subshell standard output
- tmpfile = open(tmppath, 'rb')
- try:
- output = tmpfile.read()
- return status, output.rstrip('\n')
- finally:
- tmpfile.close()
- finally:
- os.remove(tmppath)
-
- def _asynclist(self, cmd):
- args = (self._env.get_variables(), cmd)
- arg = encodeargs(args)
- # Keep the encoded payload below the Windows command line length limit
- assert len(arg) < 30*1024
- cmd = ['pysh.bat', '--ast', '-c', arg]
- p = subprocess.Popen(cmd, cwd=self._env['PWD'])
- self._children[p.pid] = p
- self._env['!'] = p.pid
- return 0
-
- def wait(self, pids=None):
- if not pids:
- pids = self._children.keys()
-
- status = 127
- for pid in pids:
- if pid not in self._children:
- continue
- p = self._children.pop(pid)
- status = p.wait()
-
- return status
-
diff --git a/bitbake/lib/bb/pysh/lsprof.py b/bitbake/lib/bb/pysh/lsprof.py
deleted file mode 100644
index b1831c22a7..0000000000
--- a/bitbake/lib/bb/pysh/lsprof.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-
-import sys
-from _lsprof import Profiler, profiler_entry
-
-__all__ = ['profile', 'Stats']
-
-def profile(f, *args, **kwds):
- """XXX docstring"""
- p = Profiler()
- p.enable(subcalls=True, builtins=True)
- try:
- f(*args, **kwds)
- finally:
- p.disable()
- return Stats(p.getstats())
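-
-# Editor's usage sketch (not in the original file):
-# stats = profile(my_function, arg1)
-# stats.sort('totaltime')
-# stats.pprint(top=10, file=sys.stderr)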
-
-
-class Stats(object):
- """XXX docstring"""
-
- def __init__(self, data):
- self.data = data
-
- def sort(self, crit="inlinetime"):
- """XXX docstring"""
- if crit not in profiler_entry.__dict__:
- raise ValueError("Can't sort by %s" % crit)
- self.data.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
- for e in self.data:
- if e.calls:
- e.calls.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
-
- def pprint(self, top=None, file=None, limit=None, climit=None):
- """XXX docstring"""
- if file is None:
- file = sys.stdout
- d = self.data
- if top is not None:
- d = d[:top]
- cols = "% 12s %12s %11.4f %11.4f %s\n"
- hcols = "% 12s %12s %12s %12s %s\n"
- cols2 = "+%12s %12s %11.4f %11.4f + %s\n"
- file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
- "Inline(ms)", "module:lineno(function)"))
- count = 0
- for e in d:
- file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
- e.inlinetime, label(e.code)))
- count += 1
- if limit is not None and count == limit:
- return
- ccount = 0
- if e.calls:
- for se in e.calls:
- file.write(cols % ("+%s" % se.callcount, se.reccallcount,
- se.totaltime, se.inlinetime,
- "+%s" % label(se.code)))
- count += 1
- ccount += 1
- if limit is not None and count == limit:
- return
- if climit is not None and ccount == climit:
- break
-
- def freeze(self):
- """Replace all references to code objects with string
- descriptions; this makes it possible to pickle the instance."""
-
- # this code is probably rather ickier than it needs to be!
- for i in range(len(self.data)):
- e = self.data[i]
- if not isinstance(e.code, str):
- self.data[i] = type(e)((label(e.code),) + e[1:])
- if e.calls:
- for j in range(len(e.calls)):
- se = e.calls[j]
- if not isinstance(se.code, str):
- e.calls[j] = type(se)((label(se.code),) + se[1:])
-
-_fn2mod = {}
-
-def label(code):
- if isinstance(code, str):
- return code
- try:
- mname = _fn2mod[code.co_filename]
- except KeyError:
- for k, v in sys.modules.items():
- if v is None:
- continue
- if not hasattr(v, '__file__'):
- continue
- if not isinstance(v.__file__, str):
- continue
- if v.__file__.startswith(code.co_filename):
- mname = _fn2mod[code.co_filename] = k
- break
- else:
- mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename
-
- return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
-
-
-if __name__ == '__main__':
- import os
- sys.argv = sys.argv[1:]
- if not sys.argv:
- print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
- sys.exit(2)
- sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
- stats = profile(execfile, sys.argv[0], globals(), locals())
- stats.sort()
- stats.pprint()
diff --git a/bitbake/lib/bb/pysh/pysh.py b/bitbake/lib/bb/pysh/pysh.py
deleted file mode 100644
index b4e6145b51..0000000000
--- a/bitbake/lib/bb/pysh/pysh.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# pysh.py - command processing for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-import optparse
-import os
-import sys
-
-import interp
-
-SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1')
-SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None,
- help='A string that shall be interpreted by the shell as one or more commands')
-SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None,
- help='Redirect script commands stdout and stderr to the specified file')
-# See utility_command in builtin.py about the reason for this flag.
-SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False,
- help='Tell the interpreter that stdout and stderr are actually the same objects, which is really stdout')
-SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False,
- help='Trace PLY execution')
-SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False,
- help='Display the generated syntax tree.')
-SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False,
- help='Trace command execution before parameters expansion and exit status.')
-SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False,
- help='Trace utility calls, after parameters expansions')
-SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False,
- help='Encoded commands to execute in a subprocess')
-SH_OPT.add_option('--profile', action='store_true', default=False,
- help='Profile pysh run')
-
-
-def split_args(args):
- # Separate shell arguments from command ones
- # Just stop at the first argument not starting with a dash. This is admittedly
- # broken: it ignores files starting with a dash and may mistake an option value
- # for the command file. Neither is supposed to happen for now.
- command_index = len(args)
- for i,arg in enumerate(args):
- if not arg.startswith('-'):
- command_index = i
- break
-
- return args[:command_index], args[command_index:]
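-
-# Editor's sketch (not in the original file):
-# split_args(['--debug-cmd', '-c', 'echo hi'])
-# == (['--debug-cmd', '-c'], ['echo hi'])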
-
-
-def fixenv(env):
- path = env.get('PATH')
- if path is not None:
- parts = path.split(os.pathsep)
- # Remove Windows utilities from PATH, they are useless at best and
- # some of them (find) may be confused with other utilities.
- parts = [p for p in parts if 'system32' not in p.lower()]
- env['PATH'] = os.pathsep.join(parts)
- if env.get('HOME') is None:
- # Several utilities, including cvsps, cannot work without
- # a defined HOME directory.
- env['HOME'] = os.path.expanduser('~')
- return env
-
-def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None):
- if os.environ.get('PYSH_TEXT') != '1':
- import msvcrt
- for fp in (sys.stdin, sys.stdout, sys.stderr):
- msvcrt.setmode(fp.fileno(), os.O_BINARY)
-
- hgbin = os.environ.get('PYSH_HGTEXT') != '1'
-
- if debugflags is None:
- debugflags = []
- if options.debug_parsing: debugflags.append('debug-parsing')
- if options.debug_utility: debugflags.append('debug-utility')
- if options.debug_cmd: debugflags.append('debug-cmd')
- if options.debug_tree: debugflags.append('debug-tree')
-
- if env is None:
- env = fixenv(dict(os.environ))
- if cwd is None:
- cwd = os.getcwd()
-
- if not cmdargs:
- # Nothing to do
- return 0
-
- ast = None
- command_file = None
- if options.command_string:
- input = cmdargs[0]
- if not options.ast:
- input += '\n'
- else:
- args, input = interp.decodeargs(input), None
- env, ast = args
- cwd = env.get('PWD', cwd)
- else:
- command_file = cmdargs[0]
- arguments = cmdargs[1:]
-
- prefix = interp.resolve_shebang(command_file, ignoreshell=True)
- if prefix:
- input = ' '.join(prefix + [command_file] + arguments)
- else:
- # Read commands from file
- f = file(command_file)
- try:
- # Trailing newline to help the parser
- input = f.read() + '\n'
- finally:
- f.close()
-
- redirect = None
- try:
- if options.redirected:
- stdout = sys.stdout
- stderr = stdout
- elif options.redirect_to:
- redirect = open(options.redirect_to, 'wb')
- stdout = redirect
- stderr = redirect
- else:
- stdout = sys.stdout
- stderr = sys.stderr
-
- # TODO: set arguments to environment variables
- opts = interp.Options()
- opts.hgbinary = hgbin
- ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr,
- opts=opts)
- try:
- # Export given environment in shell object
- for k,v in env.iteritems():
- ip.get_env().export(k,v)
- return ip.execute_script(input, ast, scriptpath=command_file)
- finally:
- ip.close()
- finally:
- if redirect is not None:
- redirect.close()
-
-def sh(cwd=None, args=None, debugflags=None, env=None):
- if args is None:
- args = sys.argv[1:]
- shargs, cmdargs = split_args(args)
- options, shargs = SH_OPT.parse_args(shargs)
-
- if options.profile:
- import lsprof
- p = lsprof.Profiler()
- p.enable(subcalls=True)
- try:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
- finally:
- p.disable()
- stats = lsprof.Stats(p.getstats())
- stats.sort()
- stats.pprint(top=10, file=sys.stderr, climit=5)
- else:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
-
-def main():
- sys.exit(sh())
-
-if __name__=='__main__':
- main()
diff --git a/bitbake/lib/bb/pysh/pyshlex.py b/bitbake/lib/bb/pysh/pyshlex.py
deleted file mode 100644
index b977b5e869..0000000000
--- a/bitbake/lib/bb/pysh/pyshlex.py
+++ /dev/null
@@ -1,888 +0,0 @@
-# pyshlex.py - PLY compatible lexer for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-# TODO:
-# - review all "char in 'abc'" snippets: the empty string can be matched
-# - test line continuations within quoted/expansion strings
-# - eof is buggy wrt sublexers
-# - the lexer cannot really work in pull mode as it would be required to run
-# PLY in pull mode. It was designed to work incrementally and it would not be
-# that hard to enable pull mode.
-import re
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
-
-from ply import lex
-from sherrors import *
-
-class NeedMore(Exception):
- pass
-
-def is_blank(c):
- return c in (' ', '\t')
-
-_RE_DIGITS = re.compile(r'^\d+$')
-
-def are_digits(s):
- return _RE_DIGITS.search(s) is not None
-
-_OPERATORS = dict([
- ('&&', 'AND_IF'),
- ('||', 'OR_IF'),
- (';;', 'DSEMI'),
- ('<<', 'DLESS'),
- ('>>', 'DGREAT'),
- ('<&', 'LESSAND'),
- ('>&', 'GREATAND'),
- ('<>', 'LESSGREAT'),
- ('<<-', 'DLESSDASH'),
- ('>|', 'CLOBBER'),
- ('&', 'AMP'),
- (';', 'COMMA'),
- ('<', 'LESS'),
- ('>', 'GREATER'),
- ('(', 'LPARENS'),
- (')', 'RPARENS'),
-])
-
-#Make a function to silence pychecker "Local variable shadows global"
-def make_partial_ops():
- partials = {}
- for k in _OPERATORS:
- for i in range(1, len(k)+1):
- partials[k[:i]] = None
- return partials
-
-_PARTIAL_OPERATORS = make_partial_ops()
-
-def is_partial_op(s):
- """Return True if s matches a non-empty subpart of an operator starting
- at its first character.
- """
- return s in _PARTIAL_OPERATORS
-
-def is_op(s):
- """If s matches an operator, returns the operator identifier. Return None
- otherwise.
- """
- return _OPERATORS.get(s)
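-
-# Editor's sketch: is_op('&&') == 'AND_IF', is_op('x') is None, and
-# is_partial_op('<') is True since '<' starts '<<', '<&', '<>' and '<<-'.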
-
-_RESERVEDS = dict([
- ('if', 'If'),
- ('then', 'Then'),
- ('else', 'Else'),
- ('elif', 'Elif'),
- ('fi', 'Fi'),
- ('do', 'Do'),
- ('done', 'Done'),
- ('case', 'Case'),
- ('esac', 'Esac'),
- ('while', 'While'),
- ('until', 'Until'),
- ('for', 'For'),
- ('{', 'Lbrace'),
- ('}', 'Rbrace'),
- ('!', 'Bang'),
- ('in', 'In'),
- ('|', 'PIPE'),
-])
-
-def get_reserved(s):
- return _RESERVEDS.get(s)
-
-_RE_NAME = re.compile(r'^[0-9a-zA-Z_]+$')
-
-def is_name(s):
- return _RE_NAME.search(s) is not None
-
-def find_chars(seq, chars):
- for i,v in enumerate(seq):
- if v in chars:
- return i,v
- return -1, None
-
-class WordLexer:
- """WordLexer parse quoted or expansion expressions and return an expression
- tree. The input string can be any well formed sequence beginning with quoting
- or expansion character. Embedded expressions are handled recursively. The
- resulting tree is made of lists and strings. Lists represent quoted or
- expansion expressions. Each list first element is the opening separator,
- the last one the closing separator. In-between can be any number of strings
- or lists for sub-expressions. Non quoted/expansion expression can written as
- strings or as lists with empty strings as starting and ending delimiters.
- """
-
- NAME_CHARSET = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
- NAME_CHARSET = dict(zip(NAME_CHARSET, NAME_CHARSET))
-
- SPECIAL_CHARSET = '@*#?-$!0'
-
- #Characters which can be escaped depend on the current delimiters
- ESCAPABLE = {
- '`': set(['$', '\\', '`']),
- '"': set(['$', '\\', '`', '"']),
- "'": set(),
- }
-
- def __init__(self, heredoc = False):
- # _buffer is the unprocessed input characters buffer
- self._buffer = []
- # _stack is empty or contains a quoted list being processed
- # (this is the DFS path to the quoted expression being evaluated).
- self._stack = []
- self._escapable = None
- # True when parsing unquoted here documents
- self._heredoc = heredoc
-
- def add(self, data, eof=False):
- """Feed the lexer with more data. If the quoted expression can be
- delimited, return a tuple (expr, remaining) containing the expression
- tree and the unconsumed data.
- Otherwise, raise NeedMore.
- """
- self._buffer += list(data)
- self._parse(eof)
-
- result = self._stack[0]
- remaining = ''.join(self._buffer)
- self._stack = []
- self._buffer = []
- return result, remaining
-
- def _is_escapable(self, c, delim=None):
- if delim is None:
- if self._heredoc:
- # Backslashes work as if they were double-quoted in unquoted
- # here-documents
- delim = '"'
- else:
- if len(self._stack)<=1:
- return True
- delim = self._stack[-2][0]
-
- escapables = self.ESCAPABLE.get(delim, None)
- return escapables is None or c in escapables
-
- def _parse_squote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
- try:
- pos = buf.index("'")
- except ValueError:
- raise NeedMore()
- result[-1] += ''.join(buf[:pos])
- result += ["'"]
- return pos+1, True
-
- def _parse_bquote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- if buf[0]=='\n':
- #Remove line continuations
- result[:] = ['', '', '']
- elif self._is_escapable(buf[0]):
- result[-1] += buf[0]
- result += ['']
- else:
- #Keep as such
- result[:] = ['', '\\'+buf[0], '']
-
- return 1, True
-
- def _parse_dquote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
- pos, sep = find_chars(buf, '$\\`"')
- if pos==-1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if sep=='"':
- result += ['"']
- return pos+1, True
- else:
- #Keep everything until the separator and defer processing
- return pos, False
-
- def _parse_command(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- chars = '$\\`"\''
- if result[0] == '$(':
- chars += ')'
- pos, sep = find_chars(buf, chars)
- if pos == -1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if (result[0]=='$(' and sep==')') or (result[0]=='`' and sep=='`'):
- result += [sep]
- return pos+1, True
- else:
- return pos, False
-
- def _parse_parameter(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- pos, sep = find_chars(buf, '$\\`"\'}')
- if pos==-1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if sep=='}':
- result += [sep]
- return pos+1, True
- else:
- return pos, False
-
- def _parse_dollar(self, buf, result, eof):
- sep = result[0]
- if sep=='$':
- if not buf:
- #TODO: handle empty $
- raise NeedMore()
- if buf[0]=='(':
- if len(buf)==1:
- raise NeedMore()
-
- if buf[1]=='(':
- result[0] = '$(('
- buf[:2] = []
- else:
- result[0] = '$('
- buf[:1] = []
-
- elif buf[0]=='{':
- result[0] = '${'
- buf[:1] = []
- else:
- if buf[0] in self.SPECIAL_CHARSET:
- result[-1] = buf[0]
- read = 1
- else:
- for read,c in enumerate(buf):
- if c not in self.NAME_CHARSET:
- break
- else:
- if not eof:
- raise NeedMore()
- read += 1
-
- result[-1] += ''.join(buf[0:read])
-
- if not result[-1]:
- result[:] = ['', result[0], '']
- else:
- result += ['']
- return read,True
-
- sep = result[0]
- if sep=='$(':
- parsefunc = self._parse_command
- elif sep=='${':
- parsefunc = self._parse_parameter
- else:
- raise NotImplementedError()
-
- pos, closed = parsefunc(buf, result, eof)
- return pos, closed
-
- def _parse(self, eof):
- buf = self._buffer
- stack = self._stack
- recurse = False
-
- while 1:
- if not stack or recurse:
- if not buf:
- raise NeedMore()
- if buf[0] not in ('"\\`$\''):
- raise ShellSyntaxError('Invalid quoted string sequence')
- stack.append([buf[0], ''])
- buf[:1] = []
- recurse = False
-
- result = stack[-1]
- if result[0]=="'":
- parsefunc = self._parse_squote
- elif result[0]=='\\':
- parsefunc = self._parse_bquote
- elif result[0]=='"':
- parsefunc = self._parse_dquote
- elif result[0]=='`':
- parsefunc = self._parse_command
- elif result[0][0]=='$':
- parsefunc = self._parse_dollar
- else:
- raise NotImplementedError()
-
- read, closed = parsefunc(buf, result, eof)
-
- buf[:read] = []
- if closed:
- if len(stack)>1:
- #Merge in parent expression
- parsed = stack.pop()
- stack[-1] += [parsed]
- stack[-1] += ['']
- else:
- break
- else:
- recurse = True
-
-def normalize_wordtree(wtree):
- """Fold back every literal sequence (delimited with empty strings) into
- parent sequence.
- """
- def normalize(wtree):
- result = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = normalize(part)
- if part[0]=='':
- #Move the part content back at current level
- result += part[1:-1]
- continue
- elif not part:
- #Remove empty strings
- continue
- result.append(part)
- if not result:
- result = ['']
- return [wtree[0]] + result + [wtree[-1]]
-
- return normalize(wtree)
-
-
-def make_wordtree(token, here_document=False):
- """Parse a delimited token and return a tree similar to the ones returned by
- WordLexer. token may contain any combinations of expansion/quoted fields and
- non-ones.
- """
- tree = ['']
- remaining = token
- delimiters = '\\$`'
- if not here_document:
- delimiters += '\'"'
-
- while 1:
- pos, sep = find_chars(remaining, delimiters)
- if pos==-1:
- tree += [remaining, '']
- return normalize_wordtree(tree)
- tree.append(remaining[:pos])
- remaining = remaining[pos:]
-
- try:
- result, remaining = WordLexer(heredoc = here_document).add(remaining, True)
- except NeedMore:
- raise ShellSyntaxError('Invalid token "%s"' % token)
- tree.append(result)
-
-
-def wordtree_as_string(wtree):
- """Rewrite an expression tree generated by make_wordtree as string."""
- def visit(node, output):
- for child in node:
- if isinstance(child, list):
- visit(child, output)
- else:
- output.append(child)
-
- output = []
- visit(wtree, output)
- return ''.join(output)
-
-
-def unquote_wordtree(wtree):
- """Fold the word tree while removing quotes everywhere. Other expansion
- sequences are joined as such.
- """
- def unquote(wtree):
- unquoted = []
- if wtree[0] in ('', "'", '"', '\\'):
- wtree = wtree[1:-1]
-
- for part in wtree:
- if isinstance(part, list):
- part = unquote(part)
- unquoted.append(part)
- return ''.join(unquoted)
-
- return unquote(wtree)
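-
-# Editor's sketch: unquote_wordtree(make_wordtree('"a b"')) == 'a b'.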
-
-
-class HereDocLexer:
- """HereDocLexer delimits whatever comes from the here-document starting newline
- not included to the closing delimiter line included.
- """
- def __init__(self, op, delim):
- assert op in ('<<', '<<-')
- if not delim:
- raise ShellSyntaxError('invalid here document delimiter %s' % str(delim))
-
- self._op = op
- self._delim = delim
- self._buffer = []
- self._token = []
-
- def add(self, data, eof):
- """If the here-document was delimited, return a tuple (content, remaining).
- Raise NeedMore() otherwise.
- """
- self._buffer += list(data)
- self._parse(eof)
- token = ''.join(self._token)
- remaining = ''.join(self._buffer)
- # Reset state; the unconsumed data was captured in remaining above
- self._token, self._buffer = [], []
- return token, remaining
-
- def _parse(self, eof):
- while 1:
- #Look for first unescaped newline. Quotes may be ignored
- escaped = False
- for i,c in enumerate(self._buffer):
- if escaped:
- escaped = False
- elif c=='\\':
- escaped = True
- elif c=='\n':
- break
- else:
- i = -1
-
- if i==-1 or self._buffer[i]!='\n':
- if not eof:
- raise NeedMore()
- #No more data, maybe the last line is closing delimiter
- line = ''.join(self._buffer)
- eol = ''
- self._buffer[:] = []
- else:
- line = ''.join(self._buffer[:i])
- eol = self._buffer[i]
- self._buffer[:i+1] = []
-
- if self._op=='<<-':
- line = line.lstrip('\t')
-
- if line==self._delim:
- break
-
- self._token += [line, eol]
- if i==-1:
- break
-
-class Token:
- #TODO: check this is still in use
- OPERATOR = 'OPERATOR'
- WORD = 'WORD'
-
- def __init__(self):
- self.value = ''
- self.type = None
-
- def __getitem__(self, key):
- #Behave like a two elements tuple
- if key==0:
- return self.type
- if key==1:
- return self.value
- raise IndexError(key)
-
-
-class HereDoc:
- def __init__(self, op, name=None):
- self.op = op
- self.name = name
- self.pendings = []
-
-TK_COMMA = 'COMMA'
-TK_AMPERSAND = 'AMP'
-TK_OP = 'OP'
-TK_TOKEN = 'TOKEN'
-TK_COMMENT = 'COMMENT'
-TK_NEWLINE = 'NEWLINE'
-TK_IONUMBER = 'IO_NUMBER'
-TK_ASSIGNMENT = 'ASSIGNMENT_WORD'
-TK_HERENAME = 'HERENAME'
-
-class Lexer:
- """Main lexer.
-
- Feed it with add(); tokens are emitted through the on_token() callback.
- """
- # Here-document handling makes the whole thing more complex because they basically
- # force tokens to be reordered: here-content must come right after the operator
- # and the here-document name, while some other tokens might be following the
- # here-document expression on the same line.
- #
- # So, here-doc states are basically:
- # *self._state==ST_NORMAL
- # - self._heredoc.op is None: no here-document
- # - self._heredoc.op is not None but name is: here-document operator matched,
- # waiting for the document name/delimiter
- # - self._heredoc.op and name are not None: here-document is ready, following
- # tokens are being stored and will be pushed again when the document is
- # completely parsed.
- # *self._state==ST_HEREDOC
- # - The here-document is being delimited by self._herelexer. Once it is done
- # the content is pushed in front of the pending token list then all these
- # tokens are pushed once again.
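- # Editor's sketch: in "cat <<EOF && echo ok", the "&& echo ok" tokens are
- # held in pendings until the here-document body is delimited, then replayed.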
- ST_NORMAL = 'ST_NORMAL'
- ST_OP = 'ST_OP'
- ST_BACKSLASH = 'ST_BACKSLASH'
- ST_QUOTED = 'ST_QUOTED'
- ST_COMMENT = 'ST_COMMENT'
- ST_HEREDOC = 'ST_HEREDOC'
-
- #Match end of backquote strings
- RE_BACKQUOTE_END = re.compile(r'(?<!\\)(`)')
-
- def __init__(self, parent_state = None):
- self._input = []
- self._pos = 0
-
- self._token = ''
- self._type = TK_TOKEN
-
- self._state = self.ST_NORMAL
- self._parent_state = parent_state
- self._wordlexer = None
-
- self._heredoc = HereDoc(None)
- self._herelexer = None
-
- ### Following attributes are not used for delimiting tokens and can safely
- ### be changed after here-document detection (see _push_token)
-
- # Count the number of tokens following a 'For' reserved word. Needed to
- # return an 'In' reserved word if it comes in third place.
- self._for_count = None
-
- def add(self, data, eof=False):
- """Feed the lexer with data.
-
- When eof is set to True, return the unconsumed data, or raise if the
- lexer is in the middle of a delimiting operation.
- Raise NeedMore otherwise.
- """
- self._input += list(data)
- self._parse(eof)
- self._input[:self._pos] = []
- return ''.join(self._input)
-
- def _parse(self, eof):
- while self._state:
- if self._pos>=len(self._input):
- if not eof:
- raise NeedMore()
- elif self._state not in (self.ST_OP, self.ST_QUOTED, self.ST_HEREDOC):
- #Delimit the current token and leave cleanly
- self._push_token('')
- break
- else:
- #Let the sublexers handle the eof themselves
- pass
-
- if self._state==self.ST_NORMAL:
- self._parse_normal()
- elif self._state==self.ST_COMMENT:
- self._parse_comment()
- elif self._state==self.ST_OP:
- self._parse_op(eof)
- elif self._state==self.ST_QUOTED:
- self._parse_quoted(eof)
- elif self._state==self.ST_HEREDOC:
- self._parse_heredoc(eof)
- else:
- assert False, "Unknown state " + str(self._state)
-
- if self._heredoc.op is not None:
- raise ShellSyntaxError('missing here-document delimiter')
-
- def _parse_normal(self):
- c = self._input[self._pos]
- if c=='\n':
- self._push_token(c)
- self._token = c
- self._type = TK_NEWLINE
- self._push_token('')
- self._pos += 1
- elif c in ('\\', '\'', '"', '`', '$'):
- self._state = self.ST_QUOTED
- elif is_partial_op(c):
- self._push_token(c)
-
- self._type = TK_OP
- self._token += c
- self._pos += 1
- self._state = self.ST_OP
- elif is_blank(c):
- self._push_token(c)
-
- #Discard blanks
- self._pos += 1
- elif self._token:
- self._token += c
- self._pos += 1
- elif c=='#':
- self._state = self.ST_COMMENT
- self._type = TK_COMMENT
- self._pos += 1
- else:
- self._pos += 1
- self._token += c
-
- def _parse_op(self, eof):
- assert self._token
-
- while 1:
- if self._pos>=len(self._input):
- if not eof:
- raise NeedMore()
- c = ''
- else:
- c = self._input[self._pos]
-
- op = self._token + c
- if c and is_partial_op(op):
- #Still parsing an operator
- self._token = op
- self._pos += 1
- else:
- #End of operator
- self._push_token(c)
- self._state = self.ST_NORMAL
- break
-
- def _parse_comment(self):
- while 1:
- if self._pos>=len(self._input):
- raise NeedMore()
-
- c = self._input[self._pos]
- if c=='\n':
- #End of comment, do not consume the end of line
- self._state = self.ST_NORMAL
- break
- else:
- self._token += c
- self._pos += 1
-
- def _parse_quoted(self, eof):
- """Precondition: the starting backquote/dollar is still in the input queue."""
- if not self._wordlexer:
- self._wordlexer = WordLexer()
-
- if self._pos<len(self._input):
- #Transfer input queue characters into the subparser
- input = self._input[self._pos:]
- self._pos += len(input)
-
- wtree, remaining = self._wordlexer.add(input, eof)
- self._wordlexer = None
- self._token += wordtree_as_string(wtree)
-
- #Put unparsed character back in the input queue
- if remaining:
- self._input[self._pos:self._pos] = list(remaining)
- self._state = self.ST_NORMAL
-
- def _parse_heredoc(self, eof):
- assert not self._token
-
- if self._herelexer is None:
- self._herelexer = HereDocLexer(self._heredoc.op, self._heredoc.name)
-
- if self._pos<len(self._input):
- #Transfer input queue characters into the subparser
- input = self._input[self._pos:]
- self._pos += len(input)
-
- self._token, remaining = self._herelexer.add(input, eof)
-
- #Reset here-document state
- self._herelexer = None
- heredoc, self._heredoc = self._heredoc, HereDoc(None)
- if remaining:
- self._input[self._pos:self._pos] = list(remaining)
- self._state = self.ST_NORMAL
-
- #Push pending tokens
- heredoc.pendings[:0] = [(self._token, self._type, heredoc.name)]
- for token, type, delim in heredoc.pendings:
- self._token = token
- self._type = type
- self._push_token(delim)
-
- def _push_token(self, delim):
- if not self._token:
- return 0
-
- if self._heredoc.op is not None:
- if self._heredoc.name is None:
- #Here-document name
- if self._type!=TK_TOKEN:
- raise ShellSyntaxError("expecting here-document name, got '%s'" % self._token)
- self._heredoc.name = unquote_wordtree(make_wordtree(self._token))
- self._type = TK_HERENAME
- else:
- #Capture all tokens until the newline starting the here-document
- if self._type==TK_NEWLINE:
- assert self._state==self.ST_NORMAL
- self._state = self.ST_HEREDOC
-
- self._heredoc.pendings.append((self._token, self._type, delim))
- self._token = ''
- self._type = TK_TOKEN
- return 1
-
- # BEWARE: do not change parser state from here to the end of the function:
- # when parsing between a here-document operator and the end of the line,
- # tokens are stored in self._heredoc.pendings. Therefore, they will not
- # reach the section below.
-
- #Check operators
- if self._type==TK_OP:
- #False positive because of partial op matching
- op = is_op(self._token)
- if not op:
- self._type = TK_TOKEN
- else:
- #Map to the specific operator
- self._type = op
- if self._token in ('<<', '<<-'):
- #Done here rather than in _parse_op because there is no need
- #to change the parser state since we are still waiting for
- #the here-document name
- if self._heredoc.op is not None:
- raise ShellSyntaxError("syntax error near token '%s'" % self._token)
- assert self._heredoc.op is None
- self._heredoc.op = self._token
-
- if self._type==TK_TOKEN:
- if '=' in self._token and not delim:
- if self._token.startswith('='):
- #Token is a WORD... a TOKEN that is.
- pass
- else:
- prev = self._token[:self._token.find('=')]
- if is_name(prev):
- self._type = TK_ASSIGNMENT
- else:
- #Just a token (unspecified)
- pass
- else:
- reserved = get_reserved(self._token)
- if reserved is not None:
- if reserved=='In' and self._for_count!=2:
- #Sorry, not a reserved word after all
- pass
- else:
- self._type = reserved
- if reserved in ('For', 'Case'):
- self._for_count = 0
- elif are_digits(self._token) and delim in ('<', '>'):
- #Detect IO_NUMBER
- self._type = TK_IONUMBER
- elif self._token==';':
- self._type = TK_COMMA
- elif self._token=='&':
- self._type = TK_AMPERSAND
- elif self._type==TK_COMMENT:
- #Comments are not part of sh grammar, ignore them
- self._token = ''
- self._type = TK_TOKEN
- return 0
-
- if self._for_count is not None:
- #Track token count in 'For' expression to detect 'In' reserved words.
- #Can only be in third position, no need to go beyond
- self._for_count += 1
- if self._for_count==3:
- self._for_count = None
-
- self.on_token((self._token, self._type))
- self._token = ''
- self._type = TK_TOKEN
- return 1
-
- def on_token(self, token):
- raise NotImplementedError
-
-
-tokens = [
- TK_TOKEN,
-# To silence yacc unused token warnings
-# TK_COMMENT,
- TK_NEWLINE,
- TK_IONUMBER,
- TK_ASSIGNMENT,
- TK_HERENAME,
-]
-
-#Add specific operators
-tokens += _OPERATORS.values()
-#Add reserved words
-tokens += _RESERVEDS.values()
-
-class PLYLexer(Lexer):
- """Bridge Lexer and PLY lexer interface."""
- def __init__(self):
- Lexer.__init__(self)
- self._tokens = []
- self._current = 0
- self.lineno = 0
-
- def on_token(self, token):
- value, type = token
-
- self.lineno = 0
- t = lex.LexToken()
- t.value = value
- t.type = type
- t.lexer = self
- t.lexpos = 0
- t.lineno = 0
-
- self._tokens.append(t)
-
- def is_empty(self):
- return not bool(self._tokens)
-
- #PLY compliant interface
- def token(self):
- if self._current>=len(self._tokens):
- return None
- t = self._tokens[self._current]
- self._current += 1
- return t
-
-
-def get_tokens(s):
- """Parse the input string and return a tuple (tokens, unprocessed) where
- tokens is a list of parsed tokens and unprocessed is the part of the input
- string left untouched by the lexer.
- """
- lexer = PLYLexer()
- untouched = lexer.add(s, True)
- tokens = []
- while 1:
- token = lexer.token()
- if token is None:
- break
- tokens.append(token)
-
- tokens = [(t.value, t.type) for t in tokens]
- return tokens, untouched
diff --git a/bitbake/lib/bb/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py
deleted file mode 100644
index e8e80aac45..0000000000
--- a/bitbake/lib/bb/pysh/pyshyacc.py
+++ /dev/null
@@ -1,779 +0,0 @@
-# pyshyacc.py - PLY grammar definition for pysh
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""PLY grammar file.
-"""
-import os.path
-import sys
-
-import pyshlex
-tokens = pyshlex.tokens
-
-from ply import yacc
-import sherrors
-
-class IORedirect:
- def __init__(self, op, filename, io_number=None):
- self.op = op
- self.filename = filename
- self.io_number = io_number
-
-class HereDocument:
- def __init__(self, op, name, content, io_number=None):
- self.op = op
- self.name = name
- self.content = content
- self.io_number = io_number
-
-def make_io_redirect(p):
- """Make an IORedirect instance from the input 'io_redirect' production."""
- name, io_number, io_target = p
- assert name=='io_redirect'
-
- if io_target[0]=='io_file':
- io_type, io_op, io_file = io_target
- return IORedirect(io_op, io_file, io_number)
- elif io_target[0]=='io_here':
- io_type, io_op, io_name, io_content = io_target
- return HereDocument(io_op, io_name, io_content, io_number)
- else:
- assert False, "Invalid IO redirection token %s" % repr(io_type)
-
-class SimpleCommand:
- """
- assigns contains (name, value) pairs.
- """
- def __init__(self, words, redirs, assigns):
- self.words = list(words)
- self.redirs = list(redirs)
- self.assigns = list(assigns)
-
-class Pipeline:
- def __init__(self, commands, reverse_status=False):
- self.commands = list(commands)
- assert self.commands #Grammar forbids this
- self.reverse_status = reverse_status
-
-class AndOr:
- def __init__(self, op, left, right):
- self.op = str(op)
- self.left = left
- self.right = right
-
-class ForLoop:
- def __init__(self, name, items, cmds):
- self.name = str(name)
- self.items = list(items)
- self.cmds = list(cmds)
-
-class WhileLoop:
- def __init__(self, condition, cmds):
- self.condition = list(condition)
- self.cmds = list(cmds)
-
-class UntilLoop:
- def __init__(self, condition, cmds):
- self.condition = list(condition)
- self.cmds = list(cmds)
-
-class FunDef:
- def __init__(self, name, body):
- self.name = str(name)
- self.body = body
-
-class BraceGroup:
- def __init__(self, cmds):
- self.cmds = list(cmds)
-
-class IfCond:
- def __init__(self, cond, if_cmds, else_cmds):
- self.cond = list(cond)
- self.if_cmds = if_cmds
- self.else_cmds = else_cmds
-
-class Case:
- def __init__(self, name, items):
- self.name = name
- self.items = items
-
-class SubShell:
- def __init__(self, cmds):
- self.cmds = cmds
-
-class RedirectList:
- def __init__(self, cmd, redirs):
- self.cmd = cmd
- self.redirs = list(redirs)
-
-def get_production(productions, ptype):
- """productions must be a list of production tuples like (name, obj) where
- name is the production string identifier.
- Return the first production named 'ptype'. Raise KeyError if none can be
- found.
- """
- for production in productions:
- if production is not None and production[0]==ptype:
- return production
- raise KeyError(ptype)
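-
-# Editor's sketch: get_production([('do_group', g), ('in', i)], 'in')
-# returns ('in', i); an absent name raises KeyError.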
-
-#-------------------------------------------------------------------------------
-# PLY grammar definition
-#-------------------------------------------------------------------------------
-
-def p_multiple_commands(p):
- """multiple_commands : newline_sequence
- | complete_command
- | multiple_commands complete_command"""
- if len(p)==2:
- if p[1] is not None:
- p[0] = [p[1]]
- else:
- p[0] = []
- else:
- p[0] = p[1] + [p[2]]
-
-def p_complete_command(p):
- """complete_command : list separator
- | list"""
- if len(p)==3 and p[2] and p[2][1] == '&':
- p[0] = ('async', p[1])
- else:
- p[0] = p[1]
-
-def p_list(p):
- """list : list separator_op and_or
- | and_or"""
- if len(p)==2:
- p[0] = [p[1]]
- else:
- #if p[2]!=';':
- # raise NotImplementedError('AND-OR list asynchronous execution is not implemented')
- p[0] = p[1] + [p[3]]
-
-def p_and_or(p):
- """and_or : pipeline
- | and_or AND_IF linebreak pipeline
- | and_or OR_IF linebreak pipeline"""
- if len(p)==2:
- p[0] = p[1]
- else:
- p[0] = ('and_or', AndOr(p[2], p[1], p[4]))
-
-def p_maybe_bang_word(p):
- """maybe_bang_word : Bang"""
- p[0] = ('maybe_bang_word', p[1])
-
-def p_pipeline(p):
- """pipeline : pipe_sequence
- | bang_word pipe_sequence"""
- if len(p)==3:
- p[0] = ('pipeline', Pipeline(p[2][1:], True))
- else:
- p[0] = ('pipeline', Pipeline(p[1][1:]))
-
-def p_pipe_sequence(p):
- """pipe_sequence : command
- | pipe_sequence PIPE linebreak command"""
- if len(p)==2:
- p[0] = ['pipe_sequence', p[1]]
- else:
- p[0] = p[1] + [p[4]]
-
-def p_command(p):
- """command : simple_command
- | compound_command
- | compound_command redirect_list
- | function_definition"""
-
- if p[1][0] in ( 'simple_command',
- 'for_clause',
- 'while_clause',
- 'until_clause',
- 'case_clause',
- 'if_clause',
- 'function_definition',
- 'subshell',
- 'brace_group',):
- if len(p) == 2:
- p[0] = p[1]
- else:
- p[0] = ('redirect_list', RedirectList(p[1], p[2][1:]))
- else:
- raise NotImplementedError('%s command is not implemented' % repr(p[1][0]))
-
-def p_compound_command(p):
- """compound_command : brace_group
- | subshell
- | for_clause
- | case_clause
- | if_clause
- | while_clause
- | until_clause"""
- p[0] = p[1]
-
-def p_subshell(p):
- """subshell : LPARENS compound_list RPARENS"""
- p[0] = ('subshell', SubShell(p[2][1:]))
-
-def p_compound_list(p):
- """compound_list : term
- | newline_list term
- | term separator
- | newline_list term separator"""
- productions = p[1:]
- try:
- sep = get_production(productions, 'separator')
- if sep[1]!=';':
- raise NotImplementedError()
- except KeyError:
- pass
- term = get_production(productions, 'term')
- p[0] = ['compound_list'] + term[1:]
-
-def p_term(p):
- """term : term separator and_or
- | and_or"""
- if len(p)==2:
- p[0] = ['term', p[1]]
- else:
- if p[2] is not None and p[2][1] == '&':
- p[0] = ['term', ('async', p[1][1:])] + [p[3]]
- else:
- p[0] = p[1] + [p[3]]
-
-def p_maybe_for_word(p):
- # Rearrange 'For' priority wrt TOKEN. See p_for_word
- """maybe_for_word : For"""
- p[0] = ('maybe_for_word', p[1])
-
-def p_for_clause(p):
- """for_clause : for_word name linebreak do_group
- | for_word name linebreak in sequential_sep do_group
- | for_word name linebreak in wordlist sequential_sep do_group"""
- productions = p[1:]
- do_group = get_production(productions, 'do_group')
- try:
- items = get_production(productions, 'in')[1:]
- except KeyError:
- raise NotImplementedError('"in" omission is not implemented')
-
- try:
- items = get_production(productions, 'wordlist')[1:]
- except KeyError:
- items = []
-
- name = p[2]
- p[0] = ('for_clause', ForLoop(name, items, do_group[1:]))
-
-def p_name(p):
- """name : token""" #Was NAME instead of token
- p[0] = p[1]
-
-def p_in(p):
- """in : In"""
- p[0] = ('in', p[1])
-
-def p_wordlist(p):
- """wordlist : wordlist token
- | token"""
- if len(p)==2:
- p[0] = ['wordlist', ('TOKEN', p[1])]
- else:
- p[0] = p[1] + [('TOKEN', p[2])]
-
-def p_case_clause(p):
- """case_clause : Case token linebreak in linebreak case_list Esac
- | Case token linebreak in linebreak case_list_ns Esac
- | Case token linebreak in linebreak Esac"""
- if len(p) < 8:
- items = []
- else:
- items = p[6][1:]
- name = p[2]
- p[0] = ('case_clause', Case(name, [c[1] for c in items]))
-
-def p_case_list_ns(p):
- """case_list_ns : case_list case_item_ns
- | case_item_ns"""
- p_case_list(p)
-
-def p_case_list(p):
- """case_list : case_list case_item
- | case_item"""
- if len(p)==2:
- p[0] = ['case_list', p[1]]
- else:
- p[0] = p[1] + [p[2]]
-
-def p_case_item_ns(p):
- """case_item_ns : pattern RPARENS linebreak
- | pattern RPARENS compound_list linebreak
- | LPARENS pattern RPARENS linebreak
- | LPARENS pattern RPARENS compound_list linebreak"""
- p_case_item(p)
-
-def p_case_item(p):
- """case_item : pattern RPARENS linebreak DSEMI linebreak
- | pattern RPARENS compound_list DSEMI linebreak
- | LPARENS pattern RPARENS linebreak DSEMI linebreak
- | LPARENS pattern RPARENS compound_list DSEMI linebreak"""
- if len(p) < 7:
- name = p[1][1:]
- else:
- name = p[2][1:]
-
- try:
- cmds = get_production(p[1:], "compound_list")[1:]
- except KeyError:
- cmds = []
-
- p[0] = ('case_item', (name, cmds))
-
-def p_pattern(p):
- """pattern : token
- | pattern PIPE token"""
- if len(p)==2:
- p[0] = ['pattern', ('TOKEN', p[1])]
- else:
- p[0] = p[1] + [('TOKEN', p[2])]
-
-def p_maybe_if_word(p):
- # Rearrange 'If' priority wrt TOKEN. See p_if_word
- """maybe_if_word : If"""
- p[0] = ('maybe_if_word', p[1])
-
-def p_maybe_then_word(p):
- # Rearrange 'Then' priority wrt TOKEN. See p_then_word
- """maybe_then_word : Then"""
- p[0] = ('maybe_then_word', p[1])
-
-def p_if_clause(p):
- """if_clause : if_word compound_list then_word compound_list else_part Fi
- | if_word compound_list then_word compound_list Fi"""
- else_part = []
- if len(p)==7:
- else_part = p[5]
- p[0] = ('if_clause', IfCond(p[2][1:], p[4][1:], else_part))
-
-def p_else_part(p):
- """else_part : Elif compound_list then_word compound_list else_part
- | Elif compound_list then_word compound_list
- | Else compound_list"""
- if len(p)==3:
- p[0] = p[2][1:]
- else:
- else_part = []
- if len(p)==6:
- else_part = p[5]
- p[0] = ('elif', IfCond(p[2][1:], p[4][1:], else_part))
-
-def p_while_clause(p):
- """while_clause : While compound_list do_group"""
- p[0] = ('while_clause', WhileLoop(p[2][1:], p[3][1:]))
-
-def p_maybe_until_word(p):
- # Rearrange 'Until' priority wrt TOKEN. See p_until_word
- """maybe_until_word : Until"""
- p[0] = ('maybe_until_word', p[1])
-
-def p_until_clause(p):
- """until_clause : until_word compound_list do_group"""
- p[0] = ('until_clause', UntilLoop(p[2][1:], p[3][1:]))
-
-def p_function_definition(p):
- """function_definition : fname LPARENS RPARENS linebreak function_body"""
- p[0] = ('function_definition', FunDef(p[1], p[5]))
-
-def p_function_body(p):
- """function_body : compound_command
- | compound_command redirect_list"""
- if len(p)!=2:
- raise NotImplementedError('function redirection lists are not implemented')
- p[0] = p[1]
-
-def p_fname(p):
- """fname : TOKEN""" #Was NAME instead of token
- p[0] = p[1]
-
-def p_brace_group(p):
- """brace_group : Lbrace compound_list Rbrace"""
- p[0] = ('brace_group', BraceGroup(p[2][1:]))
-
-def p_maybe_done_word(p):
- #See p_assignment_word for details.
- """maybe_done_word : Done"""
- p[0] = ('maybe_done_word', p[1])
-
-def p_maybe_do_word(p):
- """maybe_do_word : Do"""
- p[0] = ('maybe_do_word', p[1])
-
-def p_do_group(p):
- """do_group : do_word compound_list done_word"""
- #Do group contains a list of AndOr
- p[0] = ['do_group'] + p[2][1:]
-
-def p_simple_command(p):
- """simple_command : cmd_prefix cmd_word cmd_suffix
- | cmd_prefix cmd_word
- | cmd_prefix
- | cmd_name cmd_suffix
- | cmd_name"""
- words, redirs, assigns = [], [], []
- for e in p[1:]:
- name = e[0]
- if name in ('cmd_prefix', 'cmd_suffix'):
- for sube in e[1:]:
- subname = sube[0]
- if subname=='io_redirect':
- redirs.append(make_io_redirect(sube))
- elif subname=='ASSIGNMENT_WORD':
- assigns.append(sube)
- else:
- words.append(sube)
- elif name in ('cmd_word', 'cmd_name'):
- words.append(e)
-
- cmd = SimpleCommand(words, redirs, assigns)
- p[0] = ('simple_command', cmd)
-
-def p_cmd_name(p):
- """cmd_name : TOKEN"""
- p[0] = ('cmd_name', p[1])
-
-def p_cmd_word(p):
- """cmd_word : token"""
- p[0] = ('cmd_word', p[1])
-
-def p_maybe_assignment_word(p):
- #See p_assignment_word for details.
- """maybe_assignment_word : ASSIGNMENT_WORD"""
- p[0] = ('maybe_assignment_word', p[1])
-
-def p_cmd_prefix(p):
- """cmd_prefix : io_redirect
- | cmd_prefix io_redirect
- | assignment_word
- | cmd_prefix assignment_word"""
- try:
- prefix = get_production(p[1:], 'cmd_prefix')
- except KeyError:
- prefix = ['cmd_prefix']
-
- try:
- value = get_production(p[1:], 'assignment_word')[1]
- value = ('ASSIGNMENT_WORD', value.split('=', 1))
- except KeyError:
- value = get_production(p[1:], 'io_redirect')
- p[0] = prefix + [value]
-
-def p_cmd_suffix(p):
- """cmd_suffix : io_redirect
- | cmd_suffix io_redirect
- | token
- | cmd_suffix token
- | maybe_for_word
- | cmd_suffix maybe_for_word
- | maybe_done_word
- | cmd_suffix maybe_done_word
- | maybe_do_word
- | cmd_suffix maybe_do_word
- | maybe_until_word
- | cmd_suffix maybe_until_word
- | maybe_assignment_word
- | cmd_suffix maybe_assignment_word
- | maybe_if_word
- | cmd_suffix maybe_if_word
- | maybe_then_word
- | cmd_suffix maybe_then_word
- | maybe_bang_word
- | cmd_suffix maybe_bang_word"""
- try:
- suffix = get_production(p[1:], 'cmd_suffix')
- token = p[2]
- except KeyError:
- suffix = ['cmd_suffix']
- token = p[1]
-
- if isinstance(token, tuple):
- if token[0]=='io_redirect':
- p[0] = suffix + [token]
- else:
- #Convert maybe_* to TOKEN if necessary
- p[0] = suffix + [('TOKEN', token[1])]
- else:
- p[0] = suffix + [('TOKEN', token)]
-
-def p_redirect_list(p):
- """redirect_list : io_redirect
- | redirect_list io_redirect"""
- if len(p) == 2:
- p[0] = ['redirect_list', make_io_redirect(p[1])]
- else:
- p[0] = p[1] + [make_io_redirect(p[2])]
-
-def p_io_redirect(p):
- """io_redirect : io_file
- | IO_NUMBER io_file
- | io_here
- | IO_NUMBER io_here"""
- if len(p)==3:
- p[0] = ('io_redirect', p[1], p[2])
- else:
- p[0] = ('io_redirect', None, p[1])
-
-def p_io_file(p):
- #Return the tuple (operator, filename)
- """io_file : LESS filename
- | LESSAND filename
- | GREATER filename
- | GREATAND filename
- | DGREAT filename
- | LESSGREAT filename
- | CLOBBER filename"""
- #Extract the filename from the file
- p[0] = ('io_file', p[1], p[2][1])
-
-def p_filename(p):
- #Return the filename
- """filename : TOKEN"""
- p[0] = ('filename', p[1])
-
-def p_io_here(p):
- """io_here : DLESS here_end
- | DLESSDASH here_end"""
- p[0] = ('io_here', p[1], p[2][1], p[2][2])
-
-def p_here_end(p):
- """here_end : HERENAME TOKEN"""
- p[0] = ('here_document', p[1], p[2])
-
-def p_newline_sequence(p):
- # Nothing in the grammar can handle leading NEWLINE productions, so add
- # this one with the lowest possible priority relative to newline_list.
- """newline_sequence : newline_list"""
- p[0] = None
-
-def p_newline_list(p):
- """newline_list : NEWLINE
- | newline_list NEWLINE"""
- p[0] = None
-
-def p_linebreak(p):
- """linebreak : newline_list
- | empty"""
- p[0] = None
-
-def p_separator_op(p):
- """separator_op : COMMA
- | AMP"""
- p[0] = p[1]
-
-def p_separator(p):
- """separator : separator_op linebreak
- | newline_list"""
- if len(p)==2:
- #Ignore newlines
- p[0] = None
- else:
- #Keep the separator operator
- p[0] = ('separator', p[1])
-
-def p_sequential_sep(p):
- """sequential_sep : COMMA linebreak
- | newline_list"""
- p[0] = None
-
-# Low priority TOKEN => for_word conversion.
-# Let maybe_for_word be used as a token when necessary in higher priority
-# rules.
-def p_for_word(p):
- """for_word : maybe_for_word"""
- p[0] = p[1]
-
-def p_if_word(p):
- """if_word : maybe_if_word"""
- p[0] = p[1]
-
-def p_then_word(p):
- """then_word : maybe_then_word"""
- p[0] = p[1]
-
-def p_done_word(p):
- """done_word : maybe_done_word"""
- p[0] = p[1]
-
-def p_do_word(p):
- """do_word : maybe_do_word"""
- p[0] = p[1]
-
-def p_until_word(p):
- """until_word : maybe_until_word"""
- p[0] = p[1]
-
-def p_assignment_word(p):
- """assignment_word : maybe_assignment_word"""
- p[0] = ('assignment_word', p[1][1])
-
-def p_bang_word(p):
- """bang_word : maybe_bang_word"""
- p[0] = ('bang_word', p[1][1])
-
-def p_token(p):
- """token : TOKEN
- | Fi"""
- p[0] = p[1]
-
-def p_empty(p):
- 'empty :'
- p[0] = None
-
-# Error rule for syntax errors
-def p_error(p):
- msg = []
- w = msg.append
- w('%r\n' % p)
- w('followed by:\n')
- for i in range(5):
- n = yacc.token()
- if not n:
- break
- w(' %r\n' % n)
- raise sherrors.ShellSyntaxError(''.join(msg))
-
-# Build the parser
-try:
- import pyshtables
-except ImportError:
- outputdir = os.path.dirname(__file__)
- if not os.access(outputdir, os.W_OK):
- outputdir = ''
- yacc.yacc(tabmodule = 'pyshtables', outputdir = outputdir, debug = 0)
-else:
- yacc.yacc(tabmodule = 'pysh.pyshtables', write_tables = 0, debug = 0)
-
-
-def parse(input, eof=False, debug=False):
- """Parse a whole script at once and return the generated AST and unconsumed
- data in a tuple.
-
- NOTE: eof is probably meaningless for now, the parser being unable to work
- in pull mode. It should be set to True.
- """
- lexer = pyshlex.PLYLexer()
- remaining = lexer.add(input, eof)
- if lexer.is_empty():
- return [], remaining
- if debug:
- debug = 2
- return yacc.parse(lexer=lexer, debug=debug), remaining
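-
-# Editor's usage sketch (not in the original file):
-# commands, remaining = parse('echo hello\n', eof=True)
-# print_commands(commands)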
-
-#-------------------------------------------------------------------------------
-# AST rendering helpers
-#-------------------------------------------------------------------------------
-
-def format_commands(v):
- """Return a tree made of strings and lists. Make command trees easier to
- display.
- """
- if isinstance(v, list):
- return [format_commands(c) for c in v]
- if isinstance(v, tuple):
- if len(v)==2 and isinstance(v[0], str) and not isinstance(v[1], str):
- if v[0] == 'async':
- return ['AsyncList', map(format_commands, v[1])]
- else:
- #Avoid decomposing tuples like ('pipeline', Pipeline(...))
- return format_commands(v[1])
- return format_commands(list(v))
- elif isinstance(v, IfCond):
- name = ['IfCond']
- name += ['if', map(format_commands, v.cond)]
- name += ['then', map(format_commands, v.if_cmds)]
- name += ['else', map(format_commands, v.else_cmds)]
- return name
- elif isinstance(v, ForLoop):
- name = ['ForLoop']
- name += [repr(v.name)+' in ', map(str, v.items)]
- name += ['commands', map(format_commands, v.cmds)]
- return name
- elif isinstance(v, AndOr):
- return [v.op, format_commands(v.left), format_commands(v.right)]
- elif isinstance(v, Pipeline):
- name = 'Pipeline'
- if v.reverse_status:
- name = '!' + name
- return [name, format_commands(v.commands)]
- elif isinstance(v, Case):
- name = ['Case']
- name += [v.name, format_commands(v.items)]
- return name
- elif isinstance(v, SimpleCommand):
- name = ['SimpleCommand']
- if v.words:
- name += ['words', map(str, v.words)]
- if v.assigns:
- assigns = [tuple(a[1]) for a in v.assigns]
- name += ['assigns', map(str, assigns)]
- if v.redirs:
- name += ['redirs', map(format_commands, v.redirs)]
- return name
- elif isinstance(v, RedirectList):
- name = ['RedirectList']
- if v.redirs:
- name += ['redirs', map(format_commands, v.redirs)]
- name += ['command', format_commands(v.cmd)]
- return name
- elif isinstance(v, IORedirect):
- return ' '.join(map(str, (v.io_number, v.op, v.filename)))
- elif isinstance(v, HereDocument):
- return ' '.join(map(str, (v.io_number, v.op, repr(v.name), repr(v.content))))
- elif isinstance(v, SubShell):
- return ['SubShell', map(format_commands, v.cmds)]
- else:
- return repr(v)
-
-def print_commands(cmds, output=sys.stdout):
- """Pretty print a command tree."""
- def print_tree(cmd, spaces, output):
- if isinstance(cmd, list):
- for c in cmd:
- print_tree(c, spaces + 3, output)
- else:
- print >>output, ' '*spaces + str(cmd)
-
- formatted = format_commands(cmds)
- print_tree(formatted, 0, output)
-
-
-def stringify_commands(cmds):
- """Serialize a command tree as a string.
-
- Returned string is not pretty and is currently used for unit tests only.
- """
- def stringify(value):
- output = []
- if isinstance(value, list):
- formatted = []
- for v in value:
- formatted.append(stringify(v))
- formatted = ' '.join(formatted)
- output.append(''.join(['<', formatted, '>']))
- else:
- output.append(value)
- return ' '.join(output)
-
- return stringify(format_commands(cmds))
-
-
-def visit_commands(cmds, callable):
- """Visit the command tree and execute callable on every Pipeline and
- SimpleCommand instances.
- """
- if isinstance(cmds, (tuple, list)):
- map(lambda c: visit_commands(c,callable), cmds)
- elif isinstance(cmds, (Pipeline, SimpleCommand)):
- callable(cmds)
diff --git a/bitbake/lib/bb/pysh/sherrors.py b/bitbake/lib/bb/pysh/sherrors.py
deleted file mode 100644
index 1d5bd53b3a..0000000000
--- a/bitbake/lib/bb/pysh/sherrors.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# sherrors.py - shell errors and signals
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Define shell exceptions and error codes.
-"""
-
-class ShellError(Exception):
- pass
-
-class ShellSyntaxError(ShellError):
- pass
-
-class UtilityError(ShellError):
- """Raised upon utility syntax error (option or operand error)."""
- pass
-
-class ExpansionError(ShellError):
- pass
-
-class CommandNotFound(ShellError):
- """Specified command was not found."""
- pass
-
-class RedirectionError(ShellError):
- pass
-
-class VarAssignmentError(ShellError):
- """Variable assignment error."""
- pass
-
-class ExitSignal(ShellError):
- """Exit signal."""
- pass
-
-class ReturnSignal(ShellError):
- """Return signal."""
- pass
\ No newline at end of file
diff --git a/bitbake/lib/bb/pysh/subprocess_fix.py b/bitbake/lib/bb/pysh/subprocess_fix.py
deleted file mode 100644
index 46eca22802..0000000000
--- a/bitbake/lib/bb/pysh/subprocess_fix.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# subprocess - Subprocesses with accessible I/O streams
-#
-# For more information about this module, see PEP 324.
-#
-# This module should remain compatible with Python 2.2, see PEP 291.
-#
-# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
-#
-# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/2.4/license for licensing details.
-
-def list2cmdline(seq):
- """
- Translate a sequence of arguments into a command line
- string, using the same rules as the MS C runtime:
-
- 1) Arguments are delimited by white space, which is either a
- space or a tab.
-
- 2) A string surrounded by double quotation marks is
- interpreted as a single argument, regardless of white space
- contained within. A quoted string can be embedded in an
- argument.
-
- 3) A double quotation mark preceded by a backslash is
- interpreted as a literal double quotation mark.
-
- 4) Backslashes are interpreted literally, unless they
- immediately precede a double quotation mark.
-
- 5) If backslashes immediately precede a double quotation mark,
- every pair of backslashes is interpreted as a literal
- backslash. If the number of backslashes is odd, the last
- backslash escapes the next double quotation mark as
- described in rule 3.
- """
-
- # See
- # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
- result = []
- needquote = False
- for arg in seq:
- bs_buf = []
-
- # Add a space to separate this argument from the others
- if result:
- result.append(' ')
-
- needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == ""
- if needquote:
- result.append('"')
-
- for c in arg:
- if c == '\\':
- # Don't know if we need to double yet.
- bs_buf.append(c)
- elif c == '"':
- # Double the preceding backslashes.
- result.append('\\' * len(bs_buf)*2)
- bs_buf = []
- result.append('\\"')
- else:
- # Normal char
- if bs_buf:
- result.extend(bs_buf)
- bs_buf = []
- result.append(c)
-
- # Add remaining backslashes, if any.
- if bs_buf:
- result.extend(bs_buf)
-
- if needquote:
- result.extend(bs_buf)
- result.append('"')
-
- return ''.join(result)
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
deleted file mode 100644
index 172e591522..0000000000
--- a/bitbake/lib/bb/runqueue.py
+++ /dev/null
@@ -1,1663 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'RunQueue' implementation
-
-Handles preparation and execution of a queue of tasks
-"""
-
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import copy
-import os
-import sys
-import signal
-import stat
-import fcntl
-import logging
-import bb
-from bb import msg, data, event
-
-bblogger = logging.getLogger("BitBake")
-logger = logging.getLogger("BitBake.RunQueue")
-
-class RunQueueStats:
- """
- Holds statistics on the tasks handled by the associated runQueue
- """
- def __init__(self, total):
- self.completed = 0
- self.skipped = 0
- self.failed = 0
- self.active = 0
- self.total = total
-
- def copy(self):
- obj = self.__class__(self.total)
- obj.__dict__.update(self.__dict__)
- return obj
-
- def taskFailed(self):
- self.active = self.active - 1
- self.failed = self.failed + 1
-
- def taskCompleted(self, number = 1):
- self.active = self.active - number
- self.completed = self.completed + number
-
- def taskSkipped(self, number = 1):
- self.active = self.active + number
- self.skipped = self.skipped + number
-
- def taskActive(self):
- self.active = self.active + 1
-
-# These values indicate the next step due to be run in the
-# runQueue state machine
-runQueuePrepare = 2
-runQueueSceneInit = 3
-runQueueSceneRun = 4
-runQueueRunInit = 5
-runQueueRunning = 6
-runQueueFailed = 7
-runQueueCleanUp = 8
-runQueueComplete = 9
-runQueueChildProcess = 10
-
-class RunQueueScheduler(object):
- """
- Control the order tasks are scheduled in.
- """
- name = "basic"
-
- def __init__(self, runqueue, rqdata):
- """
- The default scheduler just returns the first buildable task (the
-        priority map is sorted by task number)
- """
- self.rq = runqueue
- self.rqdata = rqdata
- numTasks = len(self.rqdata.runq_fnid)
-
- self.prio_map = []
- self.prio_map.extend(range(numTasks))
-
- def next_buildable_task(self):
- """
- Return the id of the first task we find that is buildable
- """
- for tasknum in xrange(len(self.rqdata.runq_fnid)):
- taskid = self.prio_map[tasknum]
- if self.rq.runq_running[taskid] == 1:
- continue
- if self.rq.runq_buildable[taskid] == 1:
- return taskid
-
- def next(self):
- """
- Return the id of the task we should build next
- """
- if self.rq.stats.active < self.rq.number_tasks:
- return self.next_buildable_task()
-
-class RunQueueSchedulerSpeed(RunQueueScheduler):
- """
-    A scheduler optimised for speed. The priority map is sorted by task weight;
-    more heavily weighted tasks (tasks needed by the most other tasks) run first.
- """
- name = "speed"
-
- def __init__(self, runqueue, rqdata):
- """
- The priority map is sorted by task weight.
- """
-
- self.rq = runqueue
- self.rqdata = rqdata
-
- sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight))
- copyweight = copy.deepcopy(self.rqdata.runq_weight)
- self.prio_map = []
-
- for weight in sortweight:
- idx = copyweight.index(weight)
- self.prio_map.append(idx)
- copyweight[idx] = -1
-
- self.prio_map.reverse()
-
-class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
- """
-    A scheduler optimised to complete .bb files as quickly as possible. The
-    priority map is sorted by task weight, but then reordered so that once a
-    given .bb file starts to build, it is completed as quickly as possible.
-    This works well where disk space is at a premium and classes like OE's
-    rm_work are in force.
- """
- name = "completion"
-
- def __init__(self, runqueue, rqdata):
- RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata)
-
- #FIXME - whilst this groups all fnids together it does not reorder the
- #fnid groups optimally.
-
- basemap = copy.deepcopy(self.prio_map)
- self.prio_map = []
- while (len(basemap) > 0):
- entry = basemap.pop(0)
- self.prio_map.append(entry)
- fnid = self.rqdata.runq_fnid[entry]
- todel = []
- for entry in basemap:
- entry_fnid = self.rqdata.runq_fnid[entry]
- if entry_fnid == fnid:
- todel.append(basemap.index(entry))
- self.prio_map.append(entry)
- todel.reverse()
- for idx in todel:
- del basemap[idx]
-
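
The schedulers differ only in how they order prio_map; the "speed" ordering
above amounts to an argsort by weight. A standalone sketch with illustrative
data (not BitBake API):

    # Sort task ids so the most heavily weighted come first.
    weights = [3, 10, 1, 7]          # stand-in for runq_weight
    prio_map = sorted(range(len(weights)), key=weights.__getitem__, reverse=True)
    print(prio_map)                  # [1, 3, 0, 2]
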
-class RunQueueData:
- """
- BitBake Run Queue implementation
- """
- def __init__(self, rq, cooker, cfgData, dataCache, taskData, targets):
- self.cooker = cooker
- self.dataCache = dataCache
- self.taskData = taskData
- self.targets = targets
- self.rq = rq
-
- self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or ""
- self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split()
-
- self.reset()
-
- def reset(self):
- self.runq_fnid = []
- self.runq_task = []
- self.runq_depends = []
- self.runq_revdeps = []
- self.runq_hash = []
-
- def runq_depends_names(self, ids):
- import re
- ret = []
- for id in self.runq_depends[ids]:
- nam = os.path.basename(self.get_user_idstring(id))
- nam = re.sub("_[^,]*,", ",", nam)
- ret.extend([nam])
- return ret
-
- def get_user_idstring(self, task):
- fn = self.taskData.fn_index[self.runq_fnid[task]]
- taskname = self.runq_task[task]
- return "%s, %s" % (fn, taskname)
-
- def get_task_id(self, fnid, taskname):
- for listid in xrange(len(self.runq_fnid)):
- if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
- return listid
- return None
-
- def circular_depchains_handler(self, tasks):
- """
- Some tasks aren't buildable, likely due to circular dependency issues.
-        Identify the circular dependencies and print them in a user-readable format.
- """
- from copy import deepcopy
-
- valid_chains = []
- explored_deps = {}
- msgs = []
-
- def chain_reorder(chain):
- """
- Reorder a dependency chain so the lowest task id is first
- """
- lowest = 0
- new_chain = []
- for entry in xrange(len(chain)):
- if chain[entry] < chain[lowest]:
- lowest = entry
- new_chain.extend(chain[lowest:])
- new_chain.extend(chain[:lowest])
- return new_chain
-
- def chain_compare_equal(chain1, chain2):
- """
- Compare two dependency chains and see if they're the same
- """
- if len(chain1) != len(chain2):
- return False
- for index in xrange(len(chain1)):
- if chain1[index] != chain2[index]:
- return False
- return True
-
- def chain_array_contains(chain, chain_array):
- """
- Return True if chain_array contains chain
- """
- for ch in chain_array:
- if chain_compare_equal(ch, chain):
- return True
- return False
-
- def find_chains(taskid, prev_chain):
- prev_chain.append(taskid)
- total_deps = []
- total_deps.extend(self.runq_revdeps[taskid])
- for revdep in self.runq_revdeps[taskid]:
- if revdep in prev_chain:
- idx = prev_chain.index(revdep)
- # To prevent duplicates, reorder the chain to start with the lowest taskid
- # and search through an array of those we've already printed
- chain = prev_chain[idx:]
- new_chain = chain_reorder(chain)
- if not chain_array_contains(new_chain, valid_chains):
- valid_chains.append(new_chain)
- msgs.append("Dependency loop #%d found:\n" % len(valid_chains))
- for dep in new_chain:
- msgs.append(" Task %s (%s) (dependent Tasks %s)\n" % (dep, self.get_user_idstring(dep), self.runq_depends_names(dep)))
- msgs.append("\n")
- if len(valid_chains) > 10:
- msgs.append("Aborted dependency loops search after 10 matches.\n")
- return msgs
- continue
- scan = False
- if revdep not in explored_deps:
- scan = True
- elif revdep in explored_deps[revdep]:
- scan = True
- else:
- for dep in prev_chain:
- if dep in explored_deps[revdep]:
- scan = True
- if scan:
- find_chains(revdep, copy.deepcopy(prev_chain))
- for dep in explored_deps[revdep]:
- if dep not in total_deps:
- total_deps.append(dep)
-
- explored_deps[taskid] = total_deps
-
- for task in tasks:
- find_chains(task, [])
-
- return msgs
-
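
The chain search above is a depth-first walk of the reverse-dependency graph
that reports any task which reappears in its own ancestry. The same idea in
miniature, on a toy graph rather than BitBake's data structures:

    revdeps = {0: [1], 1: [2], 2: [0], 3: []}   # 0 -> 1 -> 2 -> 0 is a loop

    def find_loops(task, chain, loops):
        for nxt in revdeps[task]:
            if nxt in chain:
                loops.append(chain[chain.index(nxt):] + [nxt])
            else:
                find_loops(nxt, chain + [nxt], loops)

    loops = []
    for t in revdeps:
        find_loops(t, [t], loops)
    # each loop is reported once per entry point; chain_reorder() and
    # chain_compare_equal() above exist to deduplicate exactly this
    print(loops)
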
- def calculate_task_weights(self, endpoints):
- """
-        Calculate a number representing the "weight" of each task. More heavily
-        weighted tasks have more tasks depending on them and hence should be
-        executed sooner for maximum speed.
-
-        This function also sanity checks the task list, finding tasks that are
-        not possible to execute due to circular dependencies.
- """
-
- numTasks = len(self.runq_fnid)
- weight = []
- deps_left = []
- task_done = []
-
- for listid in xrange(numTasks):
- task_done.append(False)
- weight.append(0)
- deps_left.append(len(self.runq_revdeps[listid]))
-
- for listid in endpoints:
- weight[listid] = 1
- task_done[listid] = True
-
- while True:
- next_points = []
- for listid in endpoints:
- for revdep in self.runq_depends[listid]:
- weight[revdep] = weight[revdep] + weight[listid]
- deps_left[revdep] = deps_left[revdep] - 1
- if deps_left[revdep] == 0:
- next_points.append(revdep)
- task_done[revdep] = True
- endpoints = next_points
- if len(next_points) == 0:
- break
-
- # Circular dependency sanity check
- problem_tasks = []
- for task in xrange(numTasks):
- if task_done[task] is False or deps_left[task] != 0:
- problem_tasks.append(task)
- logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task))
- logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[task], deps_left[task])
-
- if problem_tasks:
- message = "Unbuildable tasks were found.\n"
- message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
- message = message + "Identifying dependency loops (this may take a short while)...\n"
- logger.error(message)
-
- msgs = self.circular_depchains_handler(problem_tasks)
-
- message = "\n"
- for msg in msgs:
- message = message + msg
- bb.msg.fatal(bb.msg.domain.RunQueue, message)
-
- return weight
-
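
The weighting is a reverse topological sweep: endpoints start at weight 1 and
each task adds its weight to everything it depends on, so a task's weight
grows with the number of tasks that transitively need it. A compact sketch
with toy data:

    depends = {0: [], 1: [0], 2: [0], 3: [1, 2]}   # depends[i]: what i needs
    revdep_count = {t: sum(t in d for d in depends.values()) for t in depends}

    weight = {t: 0 for t in depends}
    frontier = [t for t, n in revdep_count.items() if n == 0]  # endpoints
    for t in frontier:
        weight[t] = 1
    while frontier:
        nxt = []
        for t in frontier:
            for dep in depends[t]:
                weight[dep] += weight[t]
                revdep_count[dep] -= 1
                if revdep_count[dep] == 0:
                    nxt.append(dep)
        frontier = nxt
    # {0: 2, 1: 1, 2: 1, 3: 1}; any task never reached would be
    # flagged as unbuildable, as in the sanity check above
    print(weight)
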
- def prepare(self):
- """
- Turn a set of taskData into a RunQueue and compute data needed
- to optimise the execution order.
- """
-
- runq_build = []
- recursive_tdepends = {}
- runq_recrdepends = []
- tdepends_fnid = {}
-
- taskData = self.taskData
-
- if len(taskData.tasks_name) == 0:
- # Nothing to do
- return 0
-
- logger.info("Preparing runqueue")
-
- # Step A - Work out a list of tasks to run
- #
- # Taskdata gives us a list of possible providers for every build and run
- # target ordered by priority. It also gives information on each of those
- # providers.
- #
- # To create the actual list of tasks to execute we fix the list of
- # providers and then resolve the dependencies into task IDs. This
- # process is repeated for each type of dependency (tdepends, deptask,
-        # rdeptask, recrdeptask, idepends).
-
- def add_build_dependencies(depids, tasknames, depends):
- for depid in depids:
- # Won't be in build_targets if ASSUME_PROVIDED
- if depid not in taskData.build_targets:
- continue
- depdata = taskData.build_targets[depid][0]
- if depdata is None:
- continue
- dep = taskData.fn_index[depdata]
- for taskname in tasknames:
- taskid = taskData.gettask_id(dep, taskname, False)
- if taskid is not None:
- depends.append(taskid)
-
- def add_runtime_dependencies(depids, tasknames, depends):
- for depid in depids:
- if depid not in taskData.run_targets:
- continue
- depdata = taskData.run_targets[depid][0]
- if depdata is None:
- continue
- dep = taskData.fn_index[depdata]
- for taskname in tasknames:
- taskid = taskData.gettask_id(dep, taskname, False)
- if taskid is not None:
- depends.append(taskid)
-
- for task in xrange(len(taskData.tasks_name)):
- depends = []
- recrdepends = []
- fnid = taskData.tasks_fnid[task]
- fn = taskData.fn_index[fnid]
- task_deps = self.dataCache.task_deps[fn]
-
- logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
-
- if fnid not in taskData.failed_fnids:
-
- # Resolve task internal dependencies
- #
- # e.g. addtask before X after Y
- depends = taskData.tasks_tdepends[task]
-
- # Resolve 'deptask' dependencies
- #
- # e.g. do_sometask[deptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS)
- if 'deptask' in task_deps and taskData.tasks_name[task] in task_deps['deptask']:
- tasknames = task_deps['deptask'][taskData.tasks_name[task]].split()
- add_build_dependencies(taskData.depids[fnid], tasknames, depends)
-
- # Resolve 'rdeptask' dependencies
- #
- # e.g. do_sometask[rdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all RDEPENDS)
- if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
- taskname = task_deps['rdeptask'][taskData.tasks_name[task]]
- add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)
-
- # Resolve inter-task dependencies
- #
- # e.g. do_sometask[depends] = "targetname:do_someothertask"
- # (makes sure sometask runs after targetname's someothertask)
- if fnid not in tdepends_fnid:
- tdepends_fnid[fnid] = set()
- idepends = taskData.tasks_idepends[task]
- for (depid, idependtask) in idepends:
- if depid in taskData.build_targets:
- # Won't be in build_targets if ASSUME_PROVIDED
- depdata = taskData.build_targets[depid][0]
- if depdata is not None:
- dep = taskData.fn_index[depdata]
- taskid = taskData.gettask_id(dep, idependtask, False)
- if taskid is None:
-                                bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s in %s depends upon nonexistent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, dep))
- depends.append(taskid)
- if depdata != fnid:
- tdepends_fnid[fnid].add(taskid)
-
-
- # Resolve recursive 'recrdeptask' dependencies (A)
- #
- # e.g. do_sometask[recrdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
- # We cover the recursive part of the dependencies below
- if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
- for taskname in task_deps['recrdeptask'][taskData.tasks_name[task]].split():
- recrdepends.append(taskname)
- add_build_dependencies(taskData.depids[fnid], [taskname], depends)
- add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)
-
-            # Remove all self references
- if task in depends:
- newdep = []
- logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)
- for dep in depends:
- if task != dep:
- newdep.append(dep)
- depends = newdep
-
- self.runq_fnid.append(taskData.tasks_fnid[task])
- self.runq_task.append(taskData.tasks_name[task])
- self.runq_depends.append(set(depends))
- self.runq_revdeps.append(set())
- self.runq_hash.append("")
-
- runq_build.append(0)
- runq_recrdepends.append(recrdepends)
-
- #
- # Build a list of recursive cumulative dependencies for each fnid
-        # We do this by fnid, since if A depends on some task in B, we're
-        # interested in the later tasks B's fnid might have, even though B
-        # itself doesn't depend on them
- #
- # Algorithm is O(tasks) + O(tasks)*O(fnids)
- #
- reccumdepends = {}
- for task in xrange(len(self.runq_fnid)):
- fnid = self.runq_fnid[task]
- if fnid not in reccumdepends:
- if fnid in tdepends_fnid:
- reccumdepends[fnid] = tdepends_fnid[fnid]
- else:
- reccumdepends[fnid] = set()
- reccumdepends[fnid].update(self.runq_depends[task])
- for task in xrange(len(self.runq_fnid)):
- taskfnid = self.runq_fnid[task]
- for fnid in reccumdepends:
- if task in reccumdepends[fnid]:
- reccumdepends[fnid].add(task)
- if taskfnid in reccumdepends:
- reccumdepends[fnid].update(reccumdepends[taskfnid])
-
-
- # Resolve recursive 'recrdeptask' dependencies (B)
- #
- # e.g. do_sometask[recrdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
- for task in xrange(len(self.runq_fnid)):
- if len(runq_recrdepends[task]) > 0:
- taskfnid = self.runq_fnid[task]
- for dep in reccumdepends[taskfnid]:
- # Ignore self references
- if dep == task:
- continue
- for taskname in runq_recrdepends[task]:
- if taskData.tasks_name[dep] == taskname:
- self.runq_depends[task].add(dep)
-
- # Step B - Mark all active tasks
- #
- # Start with the tasks we were asked to run and mark all dependencies
- # as active too. If the task is to be 'forced', clear its stamp. Once
- # all active tasks are marked, prune the ones we don't need.
-
- logger.verbose("Marking Active Tasks")
-
- def mark_active(listid, depth):
- """
- Mark an item as active along with its depends
- (calls itself recursively)
- """
-
- if runq_build[listid] == 1:
- return
-
- runq_build[listid] = 1
-
- depends = self.runq_depends[listid]
- for depend in depends:
- mark_active(depend, depth+1)
-
- self.target_pairs = []
- for target in self.targets:
- targetid = taskData.getbuild_id(target[0])
-
- if targetid not in taskData.build_targets:
- continue
-
- if targetid in taskData.failed_deps:
- continue
-
- fnid = taskData.build_targets[targetid][0]
- fn = taskData.fn_index[fnid]
- self.target_pairs.append((fn, target[1]))
-
- if fnid in taskData.failed_fnids:
- continue
-
- if target[1] not in taskData.tasks_lookup[fnid]:
- bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s does not exist for target %s" % (target[1], target[0]))
-
- listid = taskData.tasks_lookup[fnid][target[1]]
-
- mark_active(listid, 1)
-
- # Step C - Prune all inactive tasks
- #
- # Once all active tasks are marked, prune the ones we don't need.
-
- maps = []
- delcount = 0
- for listid in xrange(len(self.runq_fnid)):
- if runq_build[listid-delcount] == 1:
- maps.append(listid-delcount)
- else:
- del self.runq_fnid[listid-delcount]
- del self.runq_task[listid-delcount]
- del self.runq_depends[listid-delcount]
- del runq_build[listid-delcount]
- del self.runq_revdeps[listid-delcount]
- del self.runq_hash[listid-delcount]
- delcount = delcount + 1
- maps.append(-1)
-
- #
- # Step D - Sanity checks and computation
- #
-
- # Check to make sure we still have tasks to run
- if len(self.runq_fnid) == 0:
- if not taskData.abort:
- bb.msg.fatal(bb.msg.domain.RunQueue, "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
- else:
- bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.")
-
- logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
-
- # Remap the dependencies to account for the deleted tasks
- # Check we didn't delete a task we depend on
- for listid in xrange(len(self.runq_fnid)):
- newdeps = []
- origdeps = self.runq_depends[listid]
- for origdep in origdeps:
- if maps[origdep] == -1:
- bb.msg.fatal(bb.msg.domain.RunQueue, "Invalid mapping - Should never happen!")
- newdeps.append(maps[origdep])
- self.runq_depends[listid] = set(newdeps)
-
- logger.verbose("Assign Weightings")
-
- # Generate a list of reverse dependencies to ease future calculations
- for listid in xrange(len(self.runq_fnid)):
- for dep in self.runq_depends[listid]:
- self.runq_revdeps[dep].add(listid)
-
- # Identify tasks at the end of dependency chains
- # Error on circular dependency loops (length two)
- endpoints = []
- for listid in xrange(len(self.runq_fnid)):
- revdeps = self.runq_revdeps[listid]
- if len(revdeps) == 0:
- endpoints.append(listid)
- for dep in revdeps:
- if dep in self.runq_depends[listid]:
- #self.dump_data(taskData)
- bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
-
- logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
-
- # Calculate task weights
-        # Check for higher length circular dependencies
- self.runq_weight = self.calculate_task_weights(endpoints)
-
- # Sanity Check - Check for multiple tasks building the same provider
- prov_list = {}
- seen_fn = []
- for task in xrange(len(self.runq_fnid)):
- fn = taskData.fn_index[self.runq_fnid[task]]
- if fn in seen_fn:
- continue
- seen_fn.append(fn)
- for prov in self.dataCache.fn_provides[fn]:
- if prov not in prov_list:
- prov_list[prov] = [fn]
- elif fn not in prov_list[prov]:
- prov_list[prov].append(fn)
- error = False
- for prov in prov_list:
- if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
- error = True
- logger.error("Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should.", prov, " ".join(prov_list[prov]))
-
-
- # Create a whitelist usable by the stamp checks
- stampfnwhitelist = []
- for entry in self.stampwhitelist.split():
- entryid = self.taskData.getbuild_id(entry)
- if entryid not in self.taskData.build_targets:
- continue
- fnid = self.taskData.build_targets[entryid][0]
- fn = self.taskData.fn_index[fnid]
- stampfnwhitelist.append(fn)
- self.stampfnwhitelist = stampfnwhitelist
-
-        # Iterate over the task list looking for tasks with a 'setscene' function
- self.runq_setscene = []
- for task in range(len(self.runq_fnid)):
- setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
- if not setscene:
- continue
- self.runq_setscene.append(task)
-
-        # Iterate over the task list and call into the siggen code
- dealtwith = set()
- todeal = set(range(len(self.runq_fnid)))
- while len(todeal) > 0:
- for task in todeal.copy():
- if len(self.runq_depends[task] - dealtwith) == 0:
- dealtwith.add(task)
- todeal.remove(task)
- procdep = []
- for dep in self.runq_depends[task]:
- procdep.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
- self.runq_hash[task] = bb.parse.siggen.get_taskhash(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task], procdep, self.dataCache)
-
- self.hashes = {}
- self.hash_deps = {}
- for task in xrange(len(self.runq_fnid)):
- identifier = '%s.%s' % (self.taskData.fn_index[self.runq_fnid[task]],
- self.runq_task[task])
- self.hashes[identifier] = self.runq_hash[task]
- deps = []
- for dep in self.runq_depends[task]:
- depidentifier = '%s.%s' % (self.taskData.fn_index[self.runq_fnid[dep]],
- self.runq_task[dep])
- deps.append(depidentifier)
- self.hash_deps[identifier] = deps
-
- # Remove stamps for targets if force mode active
- if self.cooker.configuration.force:
- for (fn, target) in self.target_pairs:
- logger.verbose("Remove stamp %s, %s", target, fn)
- bb.build.del_stamp(target, self.dataCache, fn)
-
- return len(self.runq_fnid)
-
- def dump_data(self, taskQueue):
- """
- Dump some debug information on the internal data structures
- """
- logger.debug(3, "run_tasks:")
- for task in xrange(len(self.rqdata.runq_task)):
- logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task])
-
- logger.debug(3, "sorted_tasks:")
- for task1 in xrange(len(self.rqdata.runq_task)):
- if task1 in self.prio_map:
- task = self.prio_map[task1]
- logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task])
-
-
-class RunQueue:
- def __init__(self, cooker, cfgData, dataCache, taskData, targets):
-
- self.cooker = cooker
- self.cfgData = cfgData
- self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets)
-
- self.stamppolicy = bb.data.getVar("BB_STAMP_POLICY", cfgData, True) or "perfile"
- self.hashvalidate = bb.data.getVar("BB_HASHCHECK_FUNCTION", cfgData, True) or None
-
- self.state = runQueuePrepare
-
- def check_stamps(self):
- unchecked = {}
- current = []
- notcurrent = []
- buildable = []
-
- if self.stamppolicy == "perfile":
- fulldeptree = False
- else:
- fulldeptree = True
- stampwhitelist = []
- if self.stamppolicy == "whitelist":
- stampwhitelist = self.rqdata.stampfnwhitelist
-
- for task in xrange(len(self.rqdata.runq_fnid)):
- unchecked[task] = ""
- if len(self.rqdata.runq_depends[task]) == 0:
- buildable.append(task)
-
- def check_buildable(self, task, buildable):
- for revdep in self.rqdata.runq_revdeps[task]:
- alldeps = 1
- for dep in self.rqdata.runq_depends[revdep]:
- if dep in unchecked:
- alldeps = 0
- if alldeps == 1:
- if revdep in unchecked:
- buildable.append(revdep)
-
- for task in xrange(len(self.rqdata.runq_fnid)):
- if task not in unchecked:
- continue
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
-            # If the stamp is missing, it's not current
- if not os.access(stampfile, os.F_OK):
- del unchecked[task]
- notcurrent.append(task)
- check_buildable(self, task, buildable)
- continue
-            # If it's a 'nostamp' task, it's not current
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'nostamp' in taskdep and task in taskdep['nostamp']:
- del unchecked[task]
- notcurrent.append(task)
- check_buildable(self, task, buildable)
- continue
-
- while (len(buildable) > 0):
- nextbuildable = []
- for task in buildable:
- if task in unchecked:
- fn = self.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
- iscurrent = True
-
- t1 = os.stat(stampfile)[stat.ST_MTIME]
- for dep in self.rqdata.runq_depends[task]:
- if iscurrent:
- fn2 = self.taskData.fn_index[self.rqdata.runq_fnid[dep]]
- taskname2 = self.rqdata.runq_task[dep]
- stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
- if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
- if dep in notcurrent:
- iscurrent = False
- else:
- t2 = os.stat(stampfile2)[stat.ST_MTIME]
- if t1 < t2:
- iscurrent = False
- del unchecked[task]
- if iscurrent:
- current.append(task)
- else:
- notcurrent.append(task)
-
- check_buildable(self, task, nextbuildable)
-
- buildable = nextbuildable
-
- #for task in range(len(self.runq_fnid)):
- # fn = self.taskData.fn_index[self.runq_fnid[task]]
- # taskname = self.runq_task[task]
- # print "%s %s.%s" % (task, taskname, fn)
-
- #print "Unchecked: %s" % unchecked
- #print "Current: %s" % current
- #print "Not current: %s" % notcurrent
-
- if len(unchecked) > 0:
- bb.msg.fatal(bb.msg.domain.RunQueue, "check_stamps fatal internal error")
- return current
-
- def check_stamp_task(self, task, taskname = None):
- def get_timestamp(f):
- try:
- if not os.access(f, os.F_OK):
- return None
- return os.stat(f)[stat.ST_MTIME]
- except:
- return None
-
- if self.stamppolicy == "perfile":
- fulldeptree = False
- else:
- fulldeptree = True
- stampwhitelist = []
- if self.stamppolicy == "whitelist":
- stampwhitelist = self.rqdata.stampfnwhitelist
-
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- if taskname is None:
- taskname = self.rqdata.runq_task[task]
-
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
-
-        # If the stamp is missing, it's not current
- if not os.access(stampfile, os.F_OK):
- logger.debug(2, "Stampfile %s not available", stampfile)
- return False
-        # If it's a 'nostamp' task, it's not current
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
- logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
- return False
-
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
- return True
-
- iscurrent = True
- t1 = get_timestamp(stampfile)
- for dep in self.rqdata.runq_depends[task]:
- if iscurrent:
- fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
- taskname2 = self.rqdata.runq_task[dep]
- stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
- stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCache, fn2)
- t2 = get_timestamp(stampfile2)
- t3 = get_timestamp(stampfile3)
- if t3 and t3 > t2:
- continue
- if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
- if not t2:
- logger.debug(2, 'Stampfile %s does not exist', stampfile2)
- iscurrent = False
- if t1 < t2:
- logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
- iscurrent = False
-
- return iscurrent
-
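
Both stamp checks reduce to one rule: a task is current only if its stamp
exists and is at least as new as the stamps of everything it depends on
(with _setscene stamps allowed to stand in). The core test, sketched with
plain files and illustrative paths:

    import os

    def stamp_current(stamp, dep_stamps):
        if not os.path.exists(stamp):
            return False                      # missing stamp: not current
        t1 = os.stat(stamp).st_mtime
        return all(
            os.path.exists(d) and os.stat(d).st_mtime <= t1
            for d in dep_stamps
        )
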
- def execute_runqueue(self):
- """
- Run the tasks in a queue prepared by rqdata.prepare()
- Upon failure, optionally try to recover the build using any alternate providers
- (if the abort on failure configuration option isn't set)
- """
-
- retval = 0.5
-
- if self.state is runQueuePrepare:
- self.rqexe = RunQueueExecuteDummy(self)
-            if self.rqdata.prepare() == 0:
- self.state = runQueueComplete
- else:
- self.state = runQueueSceneInit
-
- if self.state is runQueueSceneInit:
- if self.cooker.configuration.dump_signatures:
- self.dump_signatures()
- else:
- self.rqexe = RunQueueExecuteScenequeue(self)
-
- if self.state is runQueueSceneRun:
- retval = self.rqexe.execute()
-
- if self.state is runQueueRunInit:
- logger.info("Executing RunQueue Tasks")
- self.rqexe = RunQueueExecuteTasks(self)
- self.state = runQueueRunning
-
- if self.state is runQueueRunning:
- retval = self.rqexe.execute()
-
- if self.state is runQueueCleanUp:
- self.rqexe.finish()
-
- if self.state is runQueueFailed:
- if not self.rqdata.taskData.tryaltconfigs:
- raise bb.runqueue.TaskFailure(self.rqexe.failed_fnids)
- for fnid in self.rqexe.failed_fnids:
- self.rqdata.taskData.fail_fnid(fnid)
- self.rqdata.reset()
-
- if self.state is runQueueComplete:
- # All done
- logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
- return False
-
- if self.state is runQueueChildProcess:
- print("Child process, eeek, shouldn't happen!")
- return False
-
- # Loop
- return retval
-
- def finish_runqueue(self, now = False):
- if now:
- self.rqexe.finish_now()
- else:
- self.rqexe.finish()
-
- def dump_signatures(self):
- self.state = runQueueComplete
- done = set()
- bb.note("Reparsing files to collect dependency data")
- for task in range(len(self.rqdata.runq_fnid)):
- if self.rqdata.runq_fnid[task] not in done:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
- done.add(self.rqdata.runq_fnid[task])
-
- bb.parse.siggen.dump_sigs(self.rqdata.dataCache)
-
- return
-
-
-class RunQueueExecute:
-
- def __init__(self, rq):
- self.rq = rq
- self.cooker = rq.cooker
- self.cfgData = rq.cfgData
- self.rqdata = rq.rqdata
-
- self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", self.cfgData, 1) or 1)
- self.scheduler = bb.data.getVar("BB_SCHEDULER", self.cfgData, 1) or "speed"
-
- self.runq_buildable = []
- self.runq_running = []
- self.runq_complete = []
- self.build_pids = {}
- self.build_pipes = {}
- self.failed_fnids = []
-
- def runqueue_process_waitpid(self):
- """
-        Return None if there are no processes awaiting result collection; otherwise
-        collect the process exit codes and close the information pipe.
- """
- result = os.waitpid(-1, os.WNOHANG)
-        if result[0] == 0 and result[1] == 0:
- return None
- task = self.build_pids[result[0]]
- del self.build_pids[result[0]]
- self.build_pipes[result[0]].close()
- del self.build_pipes[result[0]]
- if result[1] != 0:
- self.task_fail(task, result[1]>>8)
- else:
- self.task_complete(task)
- return True
-
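
The call above is the standard non-blocking reaping pattern:
os.waitpid(-1, os.WNOHANG) returns (0, 0) while children are still running
and a (pid, status) pair once one exits. The loop body, as a Python 3 sketch
(the original is Python 2):

    import os

    def reap_one():
        # assumes at least one child exists, as the callers above ensure
        pid, status = os.waitpid(-1, os.WNOHANG)
        if pid == 0:
            return None              # children exist but none has exited yet
        return pid, status >> 8      # exit code, matching result[1] >> 8 above
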
- def finish_now(self):
- if self.stats.active:
- logger.info("Sending SIGTERM to remaining %s tasks", self.stats.active)
- for k, v in self.build_pids.iteritems():
- try:
- os.kill(-k, signal.SIGTERM)
- except:
- pass
- for pipe in self.build_pipes:
- self.build_pipes[pipe].read()
-
- def finish(self):
- self.rq.state = runQueueCleanUp
-
- for pipe in self.build_pipes:
- self.build_pipes[pipe].read()
-
- if self.stats.active > 0:
- bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
- self.runqueue_process_waitpid()
- return
-
- if len(self.failed_fnids) != 0:
- self.rq.state = runQueueFailed
- return
-
- self.rq.state = runQueueComplete
- return
-
- def fork_off_task(self, fn, task, taskname, quieterrors=False):
- the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
-
- env = bb.data.export_vars(the_data)
- env = bb.data.export_envvars(env, the_data)
-
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'fakeroot' in taskdep and taskname in taskdep['fakeroot']:
- envvars = the_data.getVar("FAKEROOTENV", True).split()
- for var in envvars:
- comps = var.split("=")
- env[comps[0]] = comps[1]
- fakedirs = (the_data.getVar("FAKEROOTDIRS", True) or "").split()
- for p in fakedirs:
- bb.mkdirhier(p)
- logger.debug(2, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs))
-
- envbackup = os.environ.copy()
- for e in envbackup:
- os.unsetenv(e)
- for e in env:
- os.putenv(e, env[e])
-
- sys.stdout.flush()
- sys.stderr.flush()
- try:
- pipein, pipeout = os.pipe()
- pipein = os.fdopen(pipein, 'rb', 4096)
- pipeout = os.fdopen(pipeout, 'wb', 0)
- pid = os.fork()
- except OSError as e:
- bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
- if pid == 0:
- pipein.close()
-
-            # Save out the PID so that events can include it
- bb.event.worker_pid = os.getpid()
- bb.event.worker_pipe = pipeout
- bb.event.useStdout = False
-
- # Child processes should send their messages to the UI
- # process via the server process, not print them
- # themselves
- bblogger.handlers = [bb.event.LogHandler()]
-
- self.rq.state = runQueueChildProcess
- # Make the child the process group leader
- os.setpgid(0, 0)
- # No stdin
- newsi = os.open(os.devnull, os.O_RDWR)
- os.dup2(newsi, sys.stdin.fileno())
- if quieterrors:
- the_data.setVarFlag(taskname, "quieterrors", "1")
-
- bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data)
- bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn, self.cooker.configuration.data)
- bb.data.setVar("BB_WORKERCONTEXT", "1", the_data)
- bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps)
-
- for h in self.rqdata.hashes:
- bb.data.setVar("BBHASH_%s" % h, self.rqdata.hashes[h], the_data)
- for h in self.rqdata.hash_deps:
- bb.data.setVar("BBHASHDEPS_%s" % h, self.rqdata.hash_deps[h], the_data)
-
- bb.data.setVar("BB_TASKHASH", self.rqdata.runq_hash[task], the_data)
-
- ret = 0
- try:
- if not self.cooker.configuration.dry_run:
- ret = bb.build.exec_task(fn, taskname, the_data)
- os._exit(ret)
- except:
- os._exit(1)
-
- for e in env:
- os.unsetenv(e)
- for e in envbackup:
- os.putenv(e, envbackup[e])
-
- return pid, pipein, pipeout
-
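
Stripped of the datastore and environment juggling, fork_off_task() is the
classic fork-with-pipe skeleton: the child becomes its own process-group
leader so finish_now() can signal the whole group via kill(-pid), and the
parent keeps only the read end. A simplified Python 3 sketch:

    import os

    def fork_off(work):
        r, w = os.pipe()
        pid = os.fork()
        if pid == 0:                 # child
            os.close(r)
            os.setpgid(0, 0)         # own process group, so kill(-pid) works
            try:
                os.write(w, work())  # work() returns bytes for the parent
                os._exit(0)
            except Exception:
                os._exit(1)
        os.close(w)                  # parent keeps only the read end
        return pid, os.fdopen(r, "rb")
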
-class RunQueueExecuteDummy(RunQueueExecute):
- def __init__(self, rq):
- self.rq = rq
- self.stats = RunQueueStats(0)
-
- def finish(self):
- self.rq.state = runQueueComplete
- return
-
-class RunQueueExecuteTasks(RunQueueExecute):
- def __init__(self, rq):
- RunQueueExecute.__init__(self, rq)
-
- self.stats = RunQueueStats(len(self.rqdata.runq_fnid))
-
- # Mark initial buildable tasks
- for task in xrange(self.stats.total):
- self.runq_running.append(0)
- self.runq_complete.append(0)
- if len(self.rqdata.runq_depends[task]) == 0:
- self.runq_buildable.append(1)
- else:
- self.runq_buildable.append(0)
- if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
- self.rq.scenequeue_covered.add(task)
-
- found = True
- while found:
- found = False
- for task in xrange(self.stats.total):
- if task in self.rq.scenequeue_covered:
- continue
- if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
- self.rq.scenequeue_covered.add(task)
- found = True
-
- logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
-
- for task in self.rq.scenequeue_covered:
- self.task_skip(task)
-
- event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData)
-
- schedulers = self.get_schedulers()
- for scheduler in schedulers:
- if self.scheduler == scheduler.name:
- self.sched = scheduler(self, self.rqdata)
- logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
- break
- else:
- bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
- (self.scheduler, ", ".join(obj.name for obj in schedulers)))
-
-
- def get_schedulers(self):
- schedulers = set(obj for obj in globals().values()
- if type(obj) is type and
- issubclass(obj, RunQueueScheduler))
-
- user_schedulers = bb.data.getVar("BB_SCHEDULERS", self.cfgData, True)
- if user_schedulers:
- for sched in user_schedulers.split():
- if not "." in sched:
- bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched)
- continue
-
- modname, name = sched.rsplit(".", 1)
- try:
- module = __import__(modname, fromlist=(name,))
- except ImportError, exc:
- logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
- raise SystemExit(1)
- else:
- schedulers.add(getattr(module, name))
- return schedulers
-
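
BB_SCHEDULERS is plain "module.ClassName" plugin loading. The __import__
call above is spelled importlib in modern Python; a minimal equivalent,
with a hypothetical dotted path:

    import importlib

    def load_plugin(dotted):         # e.g. "mypkg.sched.MyScheduler"
        modname, _, clsname = dotted.rpartition(".")
        if not modname:
            raise ValueError("not a module.Class path: %s" % dotted)
        return getattr(importlib.import_module(modname), clsname)
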
- def task_completeoutright(self, task):
- """
- Mark a task as completed
- Look at the reverse dependencies and mark any task with
- completed dependencies as buildable
- """
- self.runq_complete[task] = 1
- for revdep in self.rqdata.runq_revdeps[task]:
- if self.runq_running[revdep] == 1:
- continue
- if self.runq_buildable[revdep] == 1:
- continue
- alldeps = 1
- for dep in self.rqdata.runq_depends[revdep]:
- if self.runq_complete[dep] != 1:
- alldeps = 0
- if alldeps == 1:
- self.runq_buildable[revdep] = 1
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
- taskname = self.rqdata.runq_task[revdep]
- logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
-
- def task_complete(self, task):
- self.stats.taskCompleted()
- bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
- self.task_completeoutright(task)
-
- def task_fail(self, task, exitcode):
- """
- Called when a task has failed
- Updates the state engine with the failure
- """
- self.stats.taskFailed()
- fnid = self.rqdata.runq_fnid[task]
- self.failed_fnids.append(fnid)
- bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
- if self.rqdata.taskData.abort:
- self.rq.state = runQueueCleanUp
-
- def task_skip(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
- self.task_completeoutright(task)
- self.stats.taskCompleted()
- self.stats.taskSkipped()
-
- def execute(self):
- """
- Run the tasks in a queue prepared by rqdata.prepare()
- """
-
- if self.stats.total == 0:
- # nothing to do
- self.rq.state = runQueueCleanUp
-
- task = self.sched.next()
- if task is not None:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
-
- taskname = self.rqdata.runq_task[task]
- if self.rq.check_stamp_task(task, taskname):
- logger.debug(2, "Stamp current task %s (%s)", task,
- self.rqdata.get_user_idstring(task))
- self.task_skip(task)
- return True
-
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- startevent = runQueueTaskStarted(task, self.stats, self.rq,
- noexec=True)
- bb.event.fire(startevent, self.cfgData)
- self.runq_running[task] = 1
- self.stats.taskActive()
- bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
- self.task_complete(task)
- return True
- else:
- startevent = runQueueTaskStarted(task, self.stats, self.rq)
- bb.event.fire(startevent, self.cfgData)
-
- pid, pipein, pipeout = self.fork_off_task(fn, task, taskname)
-
- self.build_pids[pid] = task
- self.build_pipes[pid] = runQueuePipe(pipein, pipeout, self.cfgData)
- self.runq_running[task] = 1
- self.stats.taskActive()
- if self.stats.active < self.number_tasks:
- return True
-
- for pipe in self.build_pipes:
- self.build_pipes[pipe].read()
-
- if self.stats.active > 0:
- if self.runqueue_process_waitpid() is None:
- return 0.5
- return True
-
- if len(self.failed_fnids) != 0:
- self.rq.state = runQueueFailed
- return True
-
- # Sanity Checks
- for task in xrange(self.stats.total):
- if self.runq_buildable[task] == 0:
- logger.error("Task %s never buildable!", task)
- if self.runq_running[task] == 0:
- logger.error("Task %s never ran!", task)
- if self.runq_complete[task] == 0:
- logger.error("Task %s never completed!", task)
- self.rq.state = runQueueComplete
- return True
-
-class RunQueueExecuteScenequeue(RunQueueExecute):
- def __init__(self, rq):
- RunQueueExecute.__init__(self, rq)
-
- self.scenequeue_covered = set()
- self.scenequeue_notcovered = set()
-
- # If we don't have any setscene functions, skip this step
- if len(self.rqdata.runq_setscene) == 0:
- rq.scenequeue_covered = set()
- rq.state = runQueueRunInit
- return
-
- self.stats = RunQueueStats(len(self.rqdata.runq_setscene))
-
- endpoints = {}
- sq_revdeps = []
- sq_revdeps_new = []
- sq_revdeps_squash = []
-
- # We need to construct a dependency graph for the setscene functions. Intermediate
- # dependencies between the setscene tasks only complicate the code. This code
- # therefore aims to collapse the huge runqueue dependency tree into a smaller one
- # only containing the setscene functions.
-
- for task in xrange(self.stats.total):
- self.runq_running.append(0)
- self.runq_complete.append(0)
- self.runq_buildable.append(0)
-
- for task in xrange(len(self.rqdata.runq_fnid)):
- sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task]))
- sq_revdeps_new.append(set())
- if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
- endpoints[task] = None
-
- for task in self.rqdata.runq_setscene:
- for dep in self.rqdata.runq_depends[task]:
- endpoints[dep] = task
-
- def process_endpoints(endpoints):
- newendpoints = {}
- for point, task in endpoints.items():
- tasks = set()
- if task:
- tasks.add(task)
- if sq_revdeps_new[point]:
- tasks |= sq_revdeps_new[point]
- sq_revdeps_new[point] = set()
- for dep in self.rqdata.runq_depends[point]:
- if point in sq_revdeps[dep]:
- sq_revdeps[dep].remove(point)
- if tasks:
- sq_revdeps_new[dep] |= tasks
- if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene:
- newendpoints[dep] = task
- if len(newendpoints) != 0:
- process_endpoints(newendpoints)
-
- process_endpoints(endpoints)
-
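
What process_endpoints() computes, in miniature: keep only the setscene
tasks and link two of them when the full graph connects them without passing
through another kept task. A brute-force sketch on a toy graph (the code
above propagates from endpoints instead, which scales better):

    def collapsed_deps(depends, keep):
        def walk(n, out, seen):
            for d in depends.get(n, ()):
                if d in seen:
                    continue
                seen.add(d)
                if d in keep:
                    out.add(d)       # stop at the first kept task
                else:
                    walk(d, out, seen)
        result = {}
        for k in sorted(keep):
            out = set()
            walk(k, out, set())
            result[k] = out
        return result

    depends = {3: [2], 2: [1], 1: [0], 0: []}
    print(collapsed_deps(depends, keep={0, 3}))   # {0: set(), 3: {0}}
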
- for task in xrange(len(self.rqdata.runq_fnid)):
- if task in self.rqdata.runq_setscene:
- deps = set()
- for dep in sq_revdeps_new[task]:
- deps.add(self.rqdata.runq_setscene.index(dep))
- sq_revdeps_squash.append(deps)
- elif len(sq_revdeps_new[task]) != 0:
- bb.msg.fatal(bb.msg.domain.RunQueue, "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
-
- #for task in xrange(len(sq_revdeps_squash)):
- # print "Task %s: %s.%s is %s " % (task, self.taskData.fn_index[self.runq_fnid[self.runq_setscene[task]]], self.runq_task[self.runq_setscene[task]] + "_setscene", sq_revdeps_squash[task])
-
- self.sq_deps = []
- self.sq_revdeps = sq_revdeps_squash
- self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps)
-
- for task in xrange(len(self.sq_revdeps)):
- self.sq_deps.append(set())
- for task in xrange(len(self.sq_revdeps)):
- for dep in self.sq_revdeps[task]:
- self.sq_deps[dep].add(task)
-
- for task in xrange(len(self.sq_revdeps)):
- if len(self.sq_revdeps[task]) == 0:
- self.runq_buildable[task] = 1
-
- if self.rq.hashvalidate:
- sq_hash = []
- sq_hashfn = []
- sq_fn = []
- sq_taskname = []
- sq_task = []
- noexec = []
- for task in xrange(len(self.sq_revdeps)):
- realtask = self.rqdata.runq_setscene[task]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
- taskname = self.rqdata.runq_task[realtask]
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- noexec.append(task)
- self.task_skip(task)
- bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
- continue
- sq_fn.append(fn)
- sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
- sq_hash.append(self.rqdata.runq_hash[realtask])
- sq_taskname.append(taskname)
- sq_task.append(task)
- call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
- locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.configuration.data }
- valid = bb.utils.better_eval(call, locs)
-
- valid_new = []
- for v in valid:
- valid_new.append(sq_task[v])
-
- for task in xrange(len(self.sq_revdeps)):
- if task not in valid_new and task not in noexec:
- logger.debug(2, 'No package found, so skipping setscene task %s',
- self.rqdata.get_user_idstring(task))
- self.task_failoutright(task)
-
- logger.info('Executing SetScene Tasks')
-
- self.rq.state = runQueueSceneRun
-
- def scenequeue_updatecounters(self, task):
- for dep in self.sq_deps[task]:
- self.sq_revdeps2[dep].remove(task)
- if len(self.sq_revdeps2[dep]) == 0:
- self.runq_buildable[dep] = 1
-
- def task_completeoutright(self, task):
- """
- Mark a task as completed
- Look at the reverse dependencies and mark any task with
- completed dependencies as buildable
- """
-
- index = self.rqdata.runq_setscene[task]
- logger.debug(1, 'Found task %s which could be accelerated',
- self.rqdata.get_user_idstring(index))
-
- self.scenequeue_covered.add(task)
- self.scenequeue_updatecounters(task)
-
- def task_complete(self, task):
- self.stats.taskCompleted()
- self.task_completeoutright(task)
-
- def task_fail(self, task, result):
- self.stats.taskFailed()
- index = self.rqdata.runq_setscene[task]
- bb.event.fire(runQueueTaskFailed(task, self.stats, result, self), self.cfgData)
- self.scenequeue_notcovered.add(task)
- self.scenequeue_updatecounters(task)
-
- def task_failoutright(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
- self.stats.taskCompleted()
- self.stats.taskSkipped()
- index = self.rqdata.runq_setscene[task]
- self.scenequeue_notcovered.add(task)
- self.scenequeue_updatecounters(task)
-
- def task_skip(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
- self.task_completeoutright(task)
- self.stats.taskCompleted()
- self.stats.taskSkipped()
-
- def execute(self):
- """
- Run the tasks in a queue prepared by prepare_runqueue
- """
-
- task = None
- if self.stats.active < self.number_tasks:
- # Find the next setscene to run
- for nexttask in xrange(self.stats.total):
- if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1:
- task = nexttask
- break
- if task is not None:
- realtask = self.rqdata.runq_setscene[task]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
-
- taskname = self.rqdata.runq_task[realtask] + "_setscene"
- if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]):
- logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
- task, self.rqdata.get_user_idstring(task))
- self.task_failoutright(task)
- return True
-
- if self.cooker.configuration.force:
- for target in self.rqdata.target_pairs:
- if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
- self.task_failoutright(task)
- return True
-
- if self.rq.check_stamp_task(realtask, taskname):
- logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
- task, self.rqdata.get_user_idstring(realtask))
- self.task_skip(task)
- return True
-
- logger.info("Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
- self.stats.total, fn, taskname))
-
- pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname)
-
- self.build_pids[pid] = task
- self.build_pipes[pid] = runQueuePipe(pipein, pipeout, self.cfgData)
- self.runq_running[task] = 1
- self.stats.taskActive()
- if self.stats.active < self.number_tasks:
- return True
-
- for pipe in self.build_pipes:
- self.build_pipes[pipe].read()
-
- if self.stats.active > 0:
- if self.runqueue_process_waitpid() is None:
- return 0.5
- return True
-
- # Convert scenequeue_covered task numbers into full taskgraph ids
- oldcovered = self.scenequeue_covered
- self.rq.scenequeue_covered = set()
- for task in oldcovered:
- self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
-
- logger.debug(1, 'We can skip tasks %s', self.rq.scenequeue_covered)
-
- self.rq.state = runQueueRunInit
- return True
-
- def fork_off_task(self, fn, task, taskname):
- return RunQueueExecute.fork_off_task(self, fn, task, taskname, quieterrors=True)
-
-class TaskFailure(Exception):
- """
- Exception raised when a task in a runqueue fails
- """
- def __init__(self, x):
- self.args = x
-
-
-class runQueueExitWait(bb.event.Event):
- """
- Event when waiting for task processes to exit
- """
-
- def __init__(self, remain):
- self.remain = remain
- self.message = "Waiting for %s active tasks to finish" % remain
- bb.event.Event.__init__(self)
-
-class runQueueEvent(bb.event.Event):
- """
- Base runQueue event class
- """
- def __init__(self, task, stats, rq):
- self.taskid = task
- self.taskstring = rq.rqdata.get_user_idstring(task)
- self.stats = stats.copy()
- bb.event.Event.__init__(self)
-
-class runQueueTaskStarted(runQueueEvent):
- """
-    Event notifying a task was started
- """
- def __init__(self, task, stats, rq, noexec=False):
- runQueueEvent.__init__(self, task, stats, rq)
- self.noexec = noexec
-
-class runQueueTaskFailed(runQueueEvent):
- """
-    Event notifying a task failed
- """
- def __init__(self, task, stats, exitcode, rq):
- runQueueEvent.__init__(self, task, stats, rq)
- self.exitcode = exitcode
-
-class runQueueTaskCompleted(runQueueEvent):
- """
-    Event notifying a task completed
- """
-
-def check_stamp_fn(fn, taskname, d):
- rqexe = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d)
- fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d)
- fnid = rqexe.rqdata.taskData.getfn_id(fn)
- taskid = rqexe.rqdata.get_task_id(fnid, taskname)
- if taskid is not None:
- return rqexe.rq.check_stamp_task(taskid)
- return None
-
-class runQueuePipe():
- """
- Abstraction for a pipe between a worker thread and the server
- """
- def __init__(self, pipein, pipeout, d):
- self.input = pipein
- pipeout.close()
- fcntl.fcntl(self.input, fcntl.F_SETFL, fcntl.fcntl(self.input, fcntl.F_GETFL) | os.O_NONBLOCK)
- self.queue = ""
- self.d = d
-
- def read(self):
- start = len(self.queue)
- try:
- self.queue = self.queue + self.input.read(102400)
- except (OSError, IOError):
- pass
- end = len(self.queue)
- index = self.queue.find("</event>")
- while index != -1:
- bb.event.fire_from_worker(self.queue[:index+8], self.d)
- self.queue = self.queue[index+8:]
- index = self.queue.find("</event>")
- return (end > start)
-
- def close(self):
- while self.read():
- continue
- if len(self.queue) > 0:
- print("Warning, worker left partial message: %s" % self.queue)
- self.input.close()
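
runQueuePipe is a non-blocking, delimiter-framed reader: O_NONBLOCK on the
read end, append whatever arrives, then peel off complete records ending in
</event>. The framing step in isolation:

    def extract_events(buf, mark="</event>"):
        events = []
        idx = buf.find(mark)
        while idx != -1:
            events.append(buf[:idx + len(mark)])
            buf = buf[idx + len(mark):]
            idx = buf.find(mark)
        return events, buf           # complete records, plus any partial tail
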
diff --git a/bitbake/lib/bb/server/__init__.py b/bitbake/lib/bb/server/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/bitbake/lib/bb/server/__init__.py
+++ /dev/null
diff --git a/bitbake/lib/bb/server/none.py b/bitbake/lib/bb/server/none.py
deleted file mode 100644
index be0fb8f776..0000000000
--- a/bitbake/lib/bb/server/none.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#
-# BitBake 'dummy' Passthrough Server
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2008 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
-    This module implements the 'none' (dummy passthrough) server for BitBake.
-
-    Rather than exposing the cooker over a real IPC mechanism such as XMLRPC,
-    it runs everything within a single process, passing commands and events
-    through directly while presenting the same interface as the other server
-    implementations.
-
-    Use register_idle_function() to add a function which the server calls
-    when no requests are pending. Make sure that those functions are
-    non-blocking or else you will introduce latency in the server's main
-    loop.
-"""
-
-import time
-import bb
-from bb.ui import uievent
-import xmlrpclib
-import pickle
-import signal
-
-DEBUG = False
-
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-import inspect, select
-
-class BitBakeServerCommands():
- def __init__(self, server, cooker):
- self.cooker = cooker
- self.server = server
-
- def runCommand(self, command):
- """
- Run a cooker command on the server
- """
- #print "Running Command %s" % command
- return self.cooker.command.runCommand(command)
-
- def terminateServer(self):
- """
- Trigger the server to quit
- """
- self.server.server_exit()
-        #print "Server (cooker) exiting"
- return
-
- def ping(self):
- """
- Dummy method which can be used to check the server is still alive
- """
- return True
-
-eventQueue = []
-
-class BBUIEventQueue:
- class event:
- def __init__(self, parent):
- self.parent = parent
- @staticmethod
- def send(event):
- bb.server.none.eventQueue.append(pickle.loads(event))
- @staticmethod
- def quit():
- return
-
- def __init__(self, BBServer):
- self.eventQueue = bb.server.none.eventQueue
- self.BBServer = BBServer
- self.EventHandle = bb.event.register_UIHhandler(self)
-
- def getEvent(self):
- if len(self.eventQueue) == 0:
- return None
-
- return self.eventQueue.pop(0)
-
- def waitEvent(self, delay):
- event = self.getEvent()
- if event:
- return event
- self.BBServer.idle_commands(delay)
- return self.getEvent()
-
- def queue_event(self, event):
- self.eventQueue.append(event)
-
- def system_quit( self ):
- bb.event.unregister_UIHhandler(self.EventHandle)
-
-# Dummy signal handler to ensure we break out of sleep upon SIGCHLD
-def chldhandler(signum, stackframe):
- pass
-
-class BitBakeServer():
- # remove this when you're done with debugging
- # allow_reuse_address = True
-
- def __init__(self, cooker):
- self._idlefuns = {}
- self.commands = BitBakeServerCommands(self, cooker)
-
- def register_idle_function(self, function, data):
- """Register a function to be called while the server is idle"""
- assert hasattr(function, '__call__')
- self._idlefuns[function] = data
-
- def idle_commands(self, delay):
- #print "Idle queue length %s" % len(self._idlefuns)
- #print "Idle timeout, running idle functions"
- #if len(self._idlefuns) == 0:
- nextsleep = delay
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, False)
- #print "Idle function returned %s" % (retval)
- if retval is False:
- del self._idlefuns[function]
- elif retval is True:
- nextsleep = None
- elif nextsleep is None:
- continue
- elif retval < nextsleep:
- nextsleep = retval
- except SystemExit:
- raise
- except:
- import traceback
- traceback.print_exc()
- self.commands.runCommand(["stateShutdown"])
- pass
- if nextsleep is not None:
- #print "Sleeping for %s (%s)" % (nextsleep, delay)
- signal.signal(signal.SIGCHLD, chldhandler)
- time.sleep(nextsleep)
- signal.signal(signal.SIGCHLD, signal.SIG_DFL)
-
- def server_exit(self):
- # Tell idle functions we're exiting
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, True)
- except:
- pass
-
-class BitbakeServerInfo():
- def __init__(self, server):
- self.server = server
- self.commands = server.commands
-
-class BitBakeServerFork():
- def __init__(self, cooker, server, serverinfo, logfile):
- serverinfo.logfile = logfile
- serverinfo.cooker = cooker
- serverinfo.server = server
-
-class BitbakeUILauch():
- def launch(self, serverinfo, uifunc, *args):
- return bb.cooker.server_main(serverinfo.cooker, uifunc, *args)
-
-class BitBakeServerConnection():
- def __init__(self, serverinfo):
- self.server = serverinfo.server
- self.connection = serverinfo.commands
- self.events = bb.server.none.BBUIEventQueue(self.server)
- for event in bb.event.ui_queue:
- self.events.queue_event(event)
-
- def terminate(self):
- try:
- self.events.system_quit()
- except:
- pass
- try:
- self.connection.terminateServer()
- except:
- pass
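
The idle-function protocol used by both servers: a registered callable
returns False (finished, deregister it), True (actively working, poll again
immediately) or a number (seconds until it next needs attention). The driver
loop above reduces to:

    def run_idle(idlefuns, default_delay):
        nextsleep = default_delay
        for func, data in list(idlefuns.items()):
            retval = func(data)
            if retval is False:
                del idlefuns[func]
            elif retval is True:
                nextsleep = None     # something is busy: do not sleep
            elif nextsleep is not None:
                nextsleep = min(nextsleep, retval)
        return nextsleep             # None means "run the loop again at once"
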
diff --git a/bitbake/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py
deleted file mode 100644
index 0d03e308d0..0000000000
--- a/bitbake/lib/bb/server/xmlrpc.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#
-# BitBake XMLRPC Server
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2008 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
- This module implements an xmlrpc server for BitBake.
-
- Use this by deriving a class from BitBakeServer and then adding
- methods which you want to "export" via XMLRPC. Methods are registered
- automatically via autoregister_all_functions(); for anything else you
- need to call register_function.
-
- Use register_idle_function() to add a function which the xmlrpc server
- calls from within serve_forever when no requests are pending. Make sure
- that those functions are non-blocking or else you will introduce latency
- in the server's main loop.
-"""
-
-import bb
-import xmlrpclib, sys
-from bb import daemonize
-from bb.ui import uievent
-
-DEBUG = False
-
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-import inspect, select
-
-if sys.hexversion < 0x020600F0:
- print("Sorry, python 2.6 or later is required for bitbake's XMLRPC mode")
- sys.exit(1)
-
-##
-# The xmlrpclib.Transport class has undergone various changes in Python 2.7
-# which break BitBake's XMLRPC implementation.
-# To work around this we subclass Transport and have a copy/paste of method
-# implementations from Python 2.6.6's xmlrpclib.
-#
-# Upstream Python bug is #8194 (http://bugs.python.org/issue8194)
-##
-
-class BBTransport(xmlrpclib.Transport):
- def request(self, host, handler, request_body, verbose=0):
- h = self.make_connection(host)
- if verbose:
- h.set_debuglevel(1)
-
- self.send_request(h, handler, request_body)
- self.send_host(h, host)
- self.send_user_agent(h)
- self.send_content(h, request_body)
-
- errcode, errmsg, headers = h.getreply()
-
- if errcode != 200:
- raise xmlrpclib.ProtocolError(
- host + handler,
- errcode, errmsg,
- headers
- )
-
- self.verbose = verbose
-
- try:
- sock = h._conn.sock
- except AttributeError:
- sock = None
-
- return self._parse_response(h.getfile(), sock)
-
- def make_connection(self, host):
- import httplib
- host, extra_headers, x509 = self.get_host_info(host)
- return httplib.HTTP(host)
-
- def _parse_response(self, file, sock):
- p, u = self.getparser()
-
- while 1:
- if sock:
- response = sock.recv(1024)
- else:
- response = file.read(1024)
- if not response:
- break
- if self.verbose:
- print "body:", repr(response)
- p.feed(response)
-
- file.close()
- p.close()
-
- return u.close()
-
-class BitBakeServerCommands():
- def __init__(self, server, cooker):
- self.cooker = cooker
- self.server = server
-
- def registerEventHandler(self, host, port):
- """
- Register a remote UI Event Handler
- """
- t = BBTransport()
- s = xmlrpclib.Server("http://%s:%d/" % (host, port), transport=t, allow_none=True)
- return bb.event.register_UIHhandler(s)
-
- def unregisterEventHandler(self, handlerNum):
- """
- Unregister a remote UI Event Handler
- """
- return bb.event.unregister_UIHhandler(handlerNum)
-
- def runCommand(self, command):
- """
- Run a cooker command on the server
- """
- return self.cooker.command.runCommand(command)
-
- def terminateServer(self):
- """
- Trigger the server to quit
- """
- self.server.quit = True
- print("Server (cooker) exitting")
- return
-
- def ping(self):
- """
- Dummy method which can be used to check the server is still alive
- """
- return True
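-
- # On the client side these methods are invoked transparently over
- # XMLRPC, mirroring registerEventHandler above (host and port here
- # are illustrative):
- #
- #   s = xmlrpclib.Server("http://localhost:8000/",
- #                        transport=BBTransport(), allow_none=True)
- #   s.ping()    # -> True while the server is alive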
-
-class BitBakeServer(SimpleXMLRPCServer):
- # remove this when you're done with debugging
- # allow_reuse_address = True
-
- def __init__(self, cooker, interface = ("localhost", 0)):
- """
- Constructor
- """
- SimpleXMLRPCServer.__init__(self, interface,
- requestHandler=SimpleXMLRPCRequestHandler,
- logRequests=False, allow_none=True)
- self._idlefuns = {}
- self.host, self.port = self.socket.getsockname()
- #self.register_introspection_functions()
- commands = BitBakeServerCommands(self, cooker)
- self.autoregister_all_functions(commands, "")
- self.cooker = cooker
-
- def autoregister_all_functions(self, context, prefix):
- """
- Convenience method for registering all functions in the scope
- of this class that start with a common prefix
- """
- methodlist = inspect.getmembers(context, inspect.ismethod)
- for name, method in methodlist:
- if name.startswith(prefix):
- self.register_function(method, name[len(prefix):])
-
- def register_idle_function(self, function, data):
- """Register a function to be called while the server is idle"""
- assert hasattr(function, '__call__')
- self._idlefuns[function] = data
-
- def serve_forever(self):
- bb.cooker.server_main(self.cooker, self._serve_forever)
-
- def _serve_forever(self):
- """
- Serve Requests. Overloaded to honor a quit command
- """
- self.quit = False
- self.timeout = 0 # Run Idle calls for our first callback
- while not self.quit:
- #print "Idle queue length %s" % len(self._idlefuns)
- self.handle_request()
- #print "Idle timeout, running idle functions"
- nextsleep = None
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, False)
- if retval is False:
- del self._idlefuns[function]
- elif retval is True:
- nextsleep = 0
- elif nextsleep == 0:
- continue
- elif nextsleep is None:
- nextsleep = retval
- elif retval < nextsleep:
- nextsleep = retval
- except SystemExit:
- raise
- except:
- import traceback
- traceback.print_exc()
- pass
- if nextsleep is None and len(self._idlefuns) > 0:
- nextsleep = 0
- self.timeout = nextsleep
- # Tell idle functions we're exiting
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, True)
- except:
- pass
-
- self.server_close()
- return
-
-class BitbakeServerInfo():
- def __init__(self, server):
- self.host = server.host
- self.port = server.port
-
-class BitBakeServerFork():
- def __init__(self, cooker, server, serverinfo, logfile):
- daemonize.createDaemon(server.serve_forever, logfile)
-
-class BitbakeUILauch():
- def launch(self, serverinfo, uifunc, *args):
- return uifunc(*args)
-
-class BitBakeServerConnection():
- def __init__(self, serverinfo):
- t = BBTransport()
- self.connection = xmlrpclib.Server("http://%s:%s" % (serverinfo.host, serverinfo.port), transport=t, allow_none=True)
- self.events = uievent.BBUIEventQueue(self.connection)
- for event in bb.event.ui_queue:
- self.events.queue_event(event)
-
- def terminate(self):
- # Don't wait for server indefinitely
- import socket
- socket.setdefaulttimeout(2)
- try:
- self.events.system_quit()
- except:
- pass
- try:
- self.connection.terminateServer()
- except:
- pass
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py
deleted file mode 100644
index 3319e2d1cc..0000000000
--- a/bitbake/lib/bb/shell.py
+++ /dev/null
@@ -1,820 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-##########################################################################
-#
-# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
-# Copyright (C) 2005-2006 Vanille Media
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-##########################################################################
-#
-# Thanks to:
-# * Holger Freyther <zecke@handhelds.org>
-# * Justin Patrin <papercrane@reversefold.com>
-#
-##########################################################################
-
-"""
-BitBake Shell
-
-IDEAS:
- * list defined tasks per package
- * list classes
- * toggle force
- * command to reparse just one (or more) bbfile(s)
- * automatic check if reparsing is necessary (inotify?)
- * frontend for bb file manipulation
- * more shell-like features:
- - output control, i.e. pipe output into grep, sort, etc.
- - job control, i.e. bring running commands into background and foreground
- * start parsing in background right after startup
- * ncurses interface
-
-PROBLEMS:
- * force doesn't always work
- * readline completion for commands with more than one parameter
-
-"""
-
-##########################################################################
-# Import and setup global variables
-##########################################################################
-
-from __future__ import print_function
-from functools import reduce
-try:
- set
-except NameError:
- from sets import Set as set
-import sys, os, readline, socket, httplib, urllib, commands, popen2, shlex, Queue, fnmatch
-from bb import data, parse, build, cache, taskdata, runqueue, providers as Providers
-
-__version__ = "0.5.3.1"
-__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
-Type 'help' for more information, press CTRL-D to exit.""" % __version__
-
-cmds = {}
-leave_mainloop = False
-last_exception = None
-cooker = None
-parsed = False
-debug = os.environ.get( "BBSHELL_DEBUG", "" )
-
-##########################################################################
-# Class BitBakeShellCommands
-##########################################################################
-
-class BitBakeShellCommands:
- """This class contains the valid commands for the shell"""
-
- def __init__( self, shell ):
- """Register all the commands"""
- self._shell = shell
- for attr in BitBakeShellCommands.__dict__:
- if not attr.startswith( "_" ):
- if attr.endswith( "_" ):
- command = attr[:-1].lower()
- else:
- command = attr[:].lower()
- method = getattr( BitBakeShellCommands, attr )
- debugOut( "registering command '%s'" % command )
- # scan number of arguments
- usage = getattr( method, "usage", "" )
- if usage != "<...>":
- numArgs = len( usage.split() )
- else:
- numArgs = -1
- shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ )
-
- def _checkParsed( self ):
- if not parsed:
- print("SHELL: This command needs to parse bbfiles...")
- self.parse( None )
-
- def _findProvider( self, item ):
- self._checkParsed()
- # Need to use taskData for this information
- preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
- if not preferred: preferred = item
- try:
- lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
- except KeyError:
- if item in cooker.status.providers:
- pf = cooker.status.providers[item][0]
- else:
- pf = None
- return pf
-
- def alias( self, params ):
- """Register a new name for a command"""
- new, old = params
- if not old in cmds:
- print("ERROR: Command '%s' not known" % old)
- else:
- cmds[new] = cmds[old]
- print("OK")
- alias.usage = "<alias> <command>"
-
- def buffer( self, params ):
- """Dump specified output buffer"""
- index = params[0]
- print(self._shell.myout.buffer( int( index ) ))
- buffer.usage = "<index>"
-
- def buffers( self, params ):
- """Show the available output buffers"""
- commands = self._shell.myout.bufferedCommands()
- if not commands:
- print("SHELL: No buffered commands available yet. Start doing something.")
- else:
- print("="*35, "Available Output Buffers", "="*27)
- for index, cmd in enumerate( commands ):
- print("| %s %s" % ( str( index ).ljust( 3 ), cmd ))
- print("="*88)
-
- def build( self, params, cmd = "build" ):
- """Build a providee"""
- global last_exception
- globexpr = params[0]
- self._checkParsed()
- names = globfilter( cooker.status.pkg_pn, globexpr )
- if len( names ) == 0: names = [ globexpr ]
- print("SHELL: Building %s" % ' '.join( names ))
-
- td = taskdata.TaskData(cooker.configuration.abort)
- localdata = data.createCopy(cooker.configuration.data)
- data.update_data(localdata)
- data.expandKeys(localdata)
-
- try:
- tasks = []
- for name in names:
- td.add_provider(localdata, cooker.status, name)
- providers = td.get_provider(name)
-
- if len(providers) == 0:
- raise Providers.NoProvider
-
- tasks.append([name, "do_%s" % cmd])
-
- td.add_unresolved(localdata, cooker.status)
-
- rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
- rq.prepare_runqueue()
- rq.execute_runqueue()
-
- except Providers.NoProvider:
- print("ERROR: No Provider")
- last_exception = Providers.NoProvider
-
- except runqueue.TaskFailure as e:
- last_exception = e
-
- except build.FuncFailed as e:
- print("ERROR: Couldn't build '%s'" % names)
- last_exception = e
-
-
- build.usage = "<providee>"
-
- def clean( self, params ):
- """Clean a providee"""
- self.build( params, "clean" )
- clean.usage = "<providee>"
-
- def compile( self, params ):
- """Execute 'compile' on a providee"""
- self.build( params, "compile" )
- compile.usage = "<providee>"
-
- def configure( self, params ):
- """Execute 'configure' on a providee"""
- self.build( params, "configure" )
- configure.usage = "<providee>"
-
- def install( self, params ):
- """Execute 'install' on a providee"""
- self.build( params, "install" )
- install.usage = "<providee>"
-
- def edit( self, params ):
- """Call $EDITOR on a providee"""
- name = params[0]
- bbfile = self._findProvider( name )
- if bbfile is not None:
- os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
- else:
- print("ERROR: Nothing provides '%s'" % name)
- edit.usage = "<providee>"
-
- def environment( self, params ):
- """Dump out the outer BitBake environment"""
- cooker.showEnvironment()
-
- def exit_( self, params ):
- """Leave the BitBake Shell"""
- debugOut( "setting leave_mainloop to true" )
- global leave_mainloop
- leave_mainloop = True
-
- def fetch( self, params ):
- """Fetch a providee"""
- self.build( params, "fetch" )
- fetch.usage = "<providee>"
-
- def fileBuild( self, params, cmd = "build" ):
- """Parse and build a .bb file"""
- global last_exception
- name = params[0]
- bf = completeFilePath( name )
- print("SHELL: Calling '%s' on '%s'" % ( cmd, bf ))
-
- try:
- cooker.buildFile(bf, cmd)
- except parse.ParseError:
- print("ERROR: Unable to open or parse '%s'" % bf)
- except build.FuncFailed as e:
- print("ERROR: Couldn't build '%s'" % name)
- last_exception = e
-
- fileBuild.usage = "<bbfile>"
-
- def fileClean( self, params ):
- """Clean a .bb file"""
- self.fileBuild( params, "clean" )
- fileClean.usage = "<bbfile>"
-
- def fileEdit( self, params ):
- """Call $EDITOR on a .bb file"""
- name = params[0]
- os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) )
- fileEdit.usage = "<bbfile>"
-
- def fileRebuild( self, params ):
- """Rebuild (clean & build) a .bb file"""
- self.fileBuild( params, "rebuild" )
- fileRebuild.usage = "<bbfile>"
-
- def fileReparse( self, params ):
- """(re)Parse a bb file"""
- bbfile = params[0]
- print("SHELL: Parsing '%s'" % bbfile)
- parse.update_mtime( bbfile )
- cooker.parser.reparse(bbfile)
- if False: #fromCache:
- print("SHELL: File has not been updated, not reparsing")
- else:
- print("SHELL: Parsed")
- fileReparse.usage = "<bbfile>"
-
- def abort( self, params ):
- """Toggle abort task execution flag (see bitbake -k)"""
- cooker.configuration.abort = not cooker.configuration.abort
- print("SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ))
-
- def force( self, params ):
- """Toggle force task execution flag (see bitbake -f)"""
- cooker.configuration.force = not cooker.configuration.force
- print("SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ))
-
- def help( self, params ):
- """Show a comprehensive list of commands and their purpose"""
- print("="*30, "Available Commands", "="*30)
- for cmd in sorted(cmds):
- function, numparams, usage, helptext = cmds[cmd]
- print("| %s | %s" % (usage.ljust(30), helptext))
- print("="*78)
-
- def lastError( self, params ):
- """Show the reason or log that was produced by the last BitBake event exception"""
- if last_exception is None:
- print("SHELL: No Errors yet (Phew)...")
- else:
- reason, event = last_exception.args
- print("SHELL: Reason for the last error: '%s'" % reason)
- if ':' in reason:
- msg, filename = reason.split( ':', 1 )
- filename = filename.strip()
- print("SHELL: Dumping log file for last error:")
- try:
- print(open( filename ).read())
- except IOError:
- print("ERROR: Couldn't open '%s'" % filename)
-
- def match( self, params ):
- """Dump all files or providers matching a glob expression"""
- what, globexpr = params
- if what == "files":
- self._checkParsed()
- for key in globfilter( cooker.status.pkg_fn, globexpr ): print(key)
- elif what == "providers":
- self._checkParsed()
- for key in globfilter( cooker.status.pkg_pn, globexpr ): print(key)
- else:
- print("Usage: match %s" % self.print_.usage)
- match.usage = "<files|providers> <glob>"
-
- def new( self, params ):
- """Create a new .bb file and open the editor"""
- dirname, filename = params
- packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] )
- fulldirname = "%s/%s" % ( packages, dirname )
-
- if not os.path.exists( fulldirname ):
- print("SHELL: Creating '%s'" % fulldirname)
- os.mkdir( fulldirname )
- if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
- if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
- print("SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ))
- return False
- print("SHELL: Creating '%s/%s'" % ( fulldirname, filename ))
- newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
- print("""DESCRIPTION = ""
-SECTION = ""
-AUTHOR = ""
-HOMEPAGE = ""
-MAINTAINER = ""
-LICENSE = "GPL"
-PR = "r0"
-
-SRC_URI = ""
-
-#inherit base
-
-#do_configure() {
-#
-#}
-
-#do_compile() {
-#
-#}
-
-#do_stage() {
-#
-#}
-
-#do_install() {
-#
-#}
-""", file=newpackage)
- newpackage.close()
- os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) )
- new.usage = "<directory> <filename>"
-
- def package( self, params ):
- """Execute 'package' on a providee"""
- self.build( params, "package" )
- package.usage = "<providee>"
-
- def pasteBin( self, params ):
- """Send a command + output buffer to the pastebin at http://rafb.net/paste"""
- index = params[0]
- contents = self._shell.myout.buffer( int( index ) )
- sendToPastebin( "output of " + params[0], contents )
- pasteBin.usage = "<index>"
-
- def pasteLog( self, params ):
- """Send the last event exception error log (if there is one) to http://rafb.net/paste"""
- if last_exception is None:
- print("SHELL: No Errors yet (Phew)...")
- else:
- reason, event = last_exception.args
- print("SHELL: Reason for the last error: '%s'" % reason)
- if ':' in reason:
- msg, filename = reason.split( ':', 1 )
- filename = filename.strip()
- print("SHELL: Pasting log file to pastebin...")
-
- file = open( filename ).read()
- sendToPastebin( "contents of " + filename, file )
-
- def patch( self, params ):
- """Execute 'patch' command on a providee"""
- self.build( params, "patch" )
- patch.usage = "<providee>"
-
- def parse( self, params ):
- """(Re-)parse .bb files and calculate the dependency graph"""
- cooker.status = cache.CacheData()
- ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or ""
- cooker.status.ignored_dependencies = set( ignore.split() )
- cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) )
-
- (filelist, masked) = cooker.collect_bbfiles()
- cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
- cooker.buildDepgraph()
- global parsed
- parsed = True
- print()
-
- def reparse( self, params ):
- """(re)Parse a providee's bb file"""
- bbfile = self._findProvider( params[0] )
- if bbfile is not None:
- print("SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] ))
- self.fileReparse( [ bbfile ] )
- else:
- print("ERROR: Nothing provides '%s'" % params[0])
- reparse.usage = "<providee>"
-
- def getvar( self, params ):
- """Dump the contents of an outer BitBake environment variable"""
- var = params[0]
- value = data.getVar( var, cooker.configuration.data, 1 )
- print(value)
- getvar.usage = "<variable>"
-
- def peek( self, params ):
- """Dump contents of variable defined in providee's metadata"""
- name, var = params
- bbfile = self._findProvider( name )
- if bbfile is not None:
- the_data = cache.Cache.loadDataFull(bbfile, cooker.configuration.data)
- value = the_data.getVar( var, 1 )
- print(value)
- else:
- print("ERROR: Nothing provides '%s'" % name)
- peek.usage = "<providee> <variable>"
-
- def poke( self, params ):
- """Set contents of variable defined in providee's metadata"""
- name, var, value = params
- bbfile = self._findProvider( name )
- if bbfile is not None:
- print("ERROR: Sorry, this functionality is currently broken")
- #d = cooker.pkgdata[bbfile]
- #data.setVar( var, value, d )
-
- # mark the change semi persistant
- #cooker.pkgdata.setDirty(bbfile, d)
- #print "OK"
- else:
- print("ERROR: Nothing provides '%s'" % name)
- poke.usage = "<providee> <variable> <value>"
-
- def print_( self, params ):
- """Dump all files or providers"""
- what = params[0]
- if what == "files":
- self._checkParsed()
- for key in cooker.status.pkg_fn: print(key)
- elif what == "providers":
- self._checkParsed()
- for key in cooker.status.providers: print(key)
- else:
- print("Usage: print %s" % self.print_.usage)
- print_.usage = "<files|providers>"
-
- def python( self, params ):
- """Enter the expert mode - an interactive BitBake Python Interpreter"""
- sys.ps1 = "EXPERT BB>>> "
- sys.ps2 = "EXPERT BB... "
- import code
- interpreter = code.InteractiveConsole( dict( globals() ) )
- interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
-
- def showdata( self, params ):
- """Execute 'showdata' on a providee"""
- cooker.showEnvironment(None, params)
- showdata.usage = "<providee>"
-
- def setVar( self, params ):
- """Set an outer BitBake environment variable"""
- var, value = params
- data.setVar( var, value, cooker.configuration.data )
- print("OK")
- setVar.usage = "<variable> <value>"
-
- def rebuild( self, params ):
- """Clean and rebuild a .bb file or a providee"""
- self.build( params, "clean" )
- self.build( params, "build" )
- rebuild.usage = "<providee>"
-
- def shell( self, params ):
- """Execute a shell command and dump the output"""
- if params != "":
- print(commands.getoutput( " ".join( params ) ))
- shell.usage = "<...>"
-
- def stage( self, params ):
- """Execute 'stage' on a providee"""
- self.build( params, "populate_staging" )
- stage.usage = "<providee>"
-
- def status( self, params ):
- """<just for testing>"""
- print("-" * 78)
- print("building list = '%s'" % cooker.building_list)
- print("build path = '%s'" % cooker.build_path)
- print("consider_msgs_cache = '%s'" % cooker.consider_msgs_cache)
- print("build stats = '%s'" % cooker.stats)
- if last_exception is not None: print("last_exception = '%s'" % repr( last_exception.args ))
- print("memory output contents = '%s'" % self._shell.myout._buffer)
-
- def test( self, params ):
- """<just for testing>"""
- print("testCommand called with '%s'" % params)
-
- def unpack( self, params ):
- """Execute 'unpack' on a providee"""
- self.build( params, "unpack" )
- unpack.usage = "<providee>"
-
- def which( self, params ):
- """Computes the providers for a given providee"""
- # Need to use taskData for this information
- item = params[0]
-
- self._checkParsed()
-
- preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
- if not preferred: preferred = item
-
- try:
- lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
- except KeyError:
- lv, lf, pv, pf = (None,)*4
-
- try:
- providers = cooker.status.providers[item]
- except KeyError:
- print("SHELL: ERROR: Nothing provides", preferred)
- else:
- for provider in providers:
- if provider == pf: provider = " (***) %s" % provider
- else: provider = " %s" % provider
- print(provider)
- which.usage = "<providee>"
-
-##########################################################################
-# Common helper functions
-##########################################################################
-
-def completeFilePath( bbfile ):
- """Get the complete bbfile path"""
- if not cooker.status: return bbfile
- if not cooker.status.pkg_fn: return bbfile
- for key in cooker.status.pkg_fn:
- if key.endswith( bbfile ):
- return key
- return bbfile
-
-def sendToPastebin( desc, content ):
- """Send content to http://oe.pastebin.com"""
- mydata = {}
- mydata["lang"] = "Plain Text"
- mydata["desc"] = desc
- mydata["cvt_tabs"] = "No"
- mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
- mydata["text"] = content
- params = urllib.urlencode( mydata )
- headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
-
- host = "rafb.net"
- conn = httplib.HTTPConnection( "%s:80" % host )
- conn.request("POST", "/paste/paste.php", params, headers )
-
- response = conn.getresponse()
- conn.close()
-
- if response.status == 302:
- location = response.getheader( "location" ) or "unknown"
- print("SHELL: Pasted to http://%s%s" % ( host, location ))
- else:
- print("ERROR: %s %s" % ( response.status, response.reason ))
-
-def completer( text, state ):
- """Return a possible readline completion"""
- debugOut( "completer called with text='%s', state='%d'" % ( text, state ) )
-
- if state == 0:
- line = readline.get_line_buffer()
- if " " in line:
- line = line.split()
- # we are in second (or more) argument
- if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage
- u = getattr( cmds[line[0]][0], "usage" ).split()[0]
- if u == "<variable>":
- allmatches = cooker.configuration.data.keys()
- elif u == "<bbfile>":
- if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
- else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn ]
- elif u == "<providee>":
- if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
- else: allmatches = cooker.status.providers.iterkeys()
- else: allmatches = [ "(No tab completion available for this command)" ]
- else: allmatches = [ "(No tab completion available for this command)" ]
- else:
- # we are in first argument
- allmatches = cmds.iterkeys()
-
- completer.matches = [ x for x in allmatches if x[:len(text)] == text ]
- #print "completer.matches = '%s'" % completer.matches
- if len( completer.matches ) > state:
- return completer.matches[state]
- else:
- return None
-
-def debugOut( text ):
- if debug:
- sys.stderr.write( "( %s )\n" % text )
-
-def columnize( alist, width = 80 ):
- """
- A word-wrap function that preserves existing line breaks
- and most spaces in the text. Expects that existing line
- breaks are posix newlines (\n).
- """
- return reduce(lambda line, word, width=width: '%s%s%s' %
- (line,
- ' \n'[(len(line[line.rfind('\n')+1:])
- + len(word.split('\n', 1)[0]
- ) >= width)],
- word),
- alist
- )
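-
-# Illustratively, columnize("this is a test".split(" "), width=10)
-# returns "this is a\ntest": words are joined with spaces until the
-# current line would reach the width, then a newline is inserted.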
-
-def globfilter( names, pattern ):
- return fnmatch.filter( names, pattern )
-
-##########################################################################
-# Class MemoryOutput
-##########################################################################
-
-class MemoryOutput:
- """File-like output class buffering the output of the last 10 commands"""
- def __init__( self, delegate ):
- self.delegate = delegate
- self._buffer = []
- self.text = []
- self._command = None
-
- def startCommand( self, command ):
- self._command = command
- self.text = []
- def endCommand( self ):
- if self._command is not None:
- if len( self._buffer ) == 10: del self._buffer[0]
- self._buffer.append( ( self._command, self.text ) )
- def removeLast( self ):
- if self._buffer:
- del self._buffer[ len( self._buffer ) - 1 ]
- self.text = []
- self._command = None
- def lastBuffer( self ):
- if self._buffer:
- return self._buffer[ len( self._buffer ) -1 ][1]
- def bufferedCommands( self ):
- return [ cmd for cmd, output in self._buffer ]
- def buffer( self, i ):
- if i < len( self._buffer ):
- return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) )
- else: return "ERROR: Invalid buffer number. Buffer needs to be in (0, %d)" % ( len( self._buffer ) - 1 )
- def write( self, text ):
- if self._command is not None and text != "BB>> ": self.text.append( text )
- if self.delegate is not None: self.delegate.write( text )
- def flush( self ):
- return self.delegate.flush()
- def fileno( self ):
- return self.delegate.fileno()
- def isatty( self ):
- return self.delegate.isatty()
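-
- # Typical (illustrative) use: sys.stdout is replaced with a
- # MemoryOutput wrapping the real stream, startCommand()/endCommand()
- # bracket each shell command, and buffer(i) replays one of the last
- # ten captured outputs.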
-
-##########################################################################
-# Class BitBakeShell
-##########################################################################
-
-class BitBakeShell:
-
- def __init__( self ):
- """Register commands and set up readline"""
- self.commandQ = Queue.Queue()
- self.commands = BitBakeShellCommands( self )
- self.myout = MemoryOutput( sys.stdout )
- self.historyfilename = os.path.expanduser( "~/.bbsh_history" )
- self.startupfilename = os.path.expanduser( "~/.bbsh_startup" )
-
- readline.set_completer( completer )
- readline.set_completer_delims( " " )
- readline.parse_and_bind("tab: complete")
-
- try:
- readline.read_history_file( self.historyfilename )
- except IOError:
- pass # It doesn't exist yet.
-
- print(__credits__)
-
- def cleanup( self ):
- """Write readline history and clean up resources"""
- debugOut( "writing command history" )
- try:
- readline.write_history_file( self.historyfilename )
- except:
- print("SHELL: Unable to save command history")
-
- def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
- """Register a command"""
- if usage == "": usage = command
- if helptext == "": helptext = function.__doc__ or "<not yet documented>"
- cmds[command] = ( function, numparams, usage, helptext )
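-
- # e.g. (illustrative) registerCommand("build", func, 1,
- # "build <providee>", "Build a providee") stores the tuple
- # (func, 1, "build <providee>", "Build a providee") in cmds["build"].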
-
- def processCommand( self, command, params ):
- """Process a command. Check number of params and print a usage string, if appropriate"""
- debugOut( "processing command '%s'..." % command )
- try:
- function, numparams, usage, helptext = cmds[command]
- except KeyError:
- print("SHELL: ERROR: '%s' command is not a valid command." % command)
- self.myout.removeLast()
- else:
- if (numparams != -1) and (not len( params ) == numparams):
- print("Usage: '%s'" % usage)
- return
-
- result = function( self.commands, params )
- debugOut( "result was '%s'" % result )
-
- def processStartupFile( self ):
- """Read and execute all commands found in $HOME/.bbsh_startup"""
- if os.path.exists( self.startupfilename ):
- startupfile = open( self.startupfilename, "r" )
- for cmdline in startupfile:
- debugOut( "processing startup line '%s'" % cmdline )
- if not cmdline:
- continue
- if "|" in cmdline:
- print("ERROR: '|' in startup file is not allowed. Ignoring line")
- continue
- self.commandQ.put( cmdline.strip() )
-
- def main( self ):
- """The main command loop"""
- while not leave_mainloop:
- try:
- if self.commandQ.empty():
- sys.stdout = self.myout.delegate
- cmdline = raw_input( "BB>> " )
- sys.stdout = self.myout
- else:
- cmdline = self.commandQ.get()
- if cmdline:
- allCommands = cmdline.split( ';' )
- for command in allCommands:
- pipecmd = None
- #
- # special case for expert mode
- if command == 'python':
- sys.stdout = self.myout.delegate
- self.processCommand( command, "" )
- sys.stdout = self.myout
- else:
- self.myout.startCommand( command )
- if '|' in command: # disable output
- command, pipecmd = command.split( '|' )
- delegate = self.myout.delegate
- self.myout.delegate = None
- tokens = shlex.split( command, True )
- self.processCommand( tokens[0], tokens[1:] or "" )
- self.myout.endCommand()
- if pipecmd is not None: # restore output
- self.myout.delegate = delegate
-
- pipe = popen2.Popen4( pipecmd )
- pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) )
- pipe.tochild.close()
- sys.stdout.write( pipe.fromchild.read() )
- #
- except EOFError:
- print()
- return
- except KeyboardInterrupt:
- print()
-
-##########################################################################
-# Start function - called from the BitBake command line utility
-##########################################################################
-
-def start( aCooker ):
- global cooker
- cooker = aCooker
- bbshell = BitBakeShell()
- bbshell.processStartupFile()
- bbshell.main()
- bbshell.cleanup()
-
-if __name__ == "__main__":
- print("SHELL: Sorry, this program should only be called by BitBake.")
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
deleted file mode 100644
index e804d611b9..0000000000
--- a/bitbake/lib/bb/siggen.py
+++ /dev/null
@@ -1,298 +0,0 @@
-import hashlib
-import logging
-import os
-import re
-import bb.data
-
-logger = logging.getLogger('BitBake.SigGen')
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
-def init(d):
- siggens = [obj for obj in globals().itervalues()
- if type(obj) is type and issubclass(obj, SignatureGenerator)]
-
- desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
- for sg in siggens:
- if desired == sg.name:
- return sg(d)
- else:
- logger.error("Invalid signature generator '%s', using default 'noop'\n"
- "Available generators: %s",
- desired, ', '.join(obj.name for obj in siggens))
- return SignatureGenerator(d)
-
-class SignatureGenerator(object):
- """
- """
- name = "noop"
-
- def __init__(self, data):
- return
-
- def finalise(self, fn, d, variant):
- return
-
- def get_taskhash(self, fn, task, deps, dataCache):
- return 0
-
- def set_taskdata(self, hashes, deps):
- return
-
- def stampfile(self, stampbase, file_name, taskname, extrainfo):
- return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
-
-class SignatureGeneratorBasic(SignatureGenerator):
- """
- """
- name = "basic"
-
- def __init__(self, data):
- self.basehash = {}
- self.taskhash = {}
- self.taskdeps = {}
- self.runtaskdeps = {}
- self.gendeps = {}
- self.lookupcache = {}
- self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
- self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
-
- if self.taskwhitelist:
- self.twl = re.compile(self.taskwhitelist)
- else:
- self.twl = None
-
- def _build_data(self, fn, d):
-
- tasklist, gendeps = bb.data.generate_dependencies(d)
-
- taskdeps = {}
- basehash = {}
- lookupcache = {}
-
- for task in tasklist:
- data = d.getVar(task, False)
- lookupcache[task] = data
-
- newdeps = gendeps[task]
- seen = set()
- while newdeps:
- nextdeps = newdeps
- seen |= nextdeps
- newdeps = set()
- for dep in nextdeps:
- if dep in self.basewhitelist:
- continue
- newdeps |= gendeps[dep]
- newdeps -= seen
-
- alldeps = seen - self.basewhitelist
-
- for dep in sorted(alldeps):
- if dep in lookupcache:
- var = lookupcache[dep]
- else:
- var = d.getVar(dep, False)
- lookupcache[dep] = var
- if var:
- data = data + var
- if data is None:
- bb.error("Task %s from %s seems to be empty?!" % (task, fn))
- self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
- taskdeps[task] = sorted(alldeps)
-
- self.taskdeps[fn] = taskdeps
- self.gendeps[fn] = gendeps
- self.lookupcache[fn] = lookupcache
-
- return taskdeps
-
- def finalise(self, fn, d, variant):
-
- if variant:
- fn = "virtual:" + variant + ":" + fn
-
- taskdeps = self._build_data(fn, d)
-
- #Slow but can be useful for debugging mismatched basehashes
- #for task in self.taskdeps[fn]:
- # self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)
-
- for task in taskdeps:
- d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
-
- def get_taskhash(self, fn, task, deps, dataCache):
- k = fn + "." + task
- data = dataCache.basetaskhash[k]
- self.runtaskdeps[k] = []
- for dep in sorted(deps):
- # We only manipulate the dependencies for packages not in the whitelist
- if self.twl and not self.twl.search(dataCache.pkg_fn[fn]):
- # then process the actual dependencies
- dep_fn = re.search("(?P<fn>.*)\..*", dep).group('fn')
- if self.twl.search(dataCache.pkg_fn[dep_fn]):
- continue
- if dep not in self.taskhash:
- bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?" % dep)
- data = data + self.taskhash[dep]
- self.runtaskdeps[k].append(dep)
- h = hashlib.md5(data).hexdigest()
- self.taskhash[k] = h
- #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
- return h
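-
- # Illustratively, the task hash is md5(basehash + taskhash(dep1) +
- # taskhash(dep2) + ...) over the sorted, non-whitelisted
- # dependencies, so a change in any dependency's hash ripples into
- # every task hash downstream of it.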
-
- def set_taskdata(self, hashes, deps):
- self.runtaskdeps = deps
- self.taskhash = hashes
-
- def dump_sigtask(self, fn, task, stampbase, runtime):
- k = fn + "." + task
- if runtime == "customfile":
- sigfile = stampbase
- elif runtime:
- sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
- else:
- sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]
-
- bb.utils.mkdirhier(os.path.dirname(sigfile))
-
- data = {}
- data['basewhitelist'] = self.basewhitelist
- data['taskwhitelist'] = self.taskwhitelist
- data['taskdeps'] = self.taskdeps[fn][task]
- data['basehash'] = self.basehash[k]
- data['gendeps'] = {}
- data['varvals'] = {}
- data['varvals'][task] = self.lookupcache[fn][task]
- for dep in self.taskdeps[fn][task]:
- if dep in self.basewhitelist:
- continue
- data['gendeps'][dep] = self.gendeps[fn][dep]
- data['varvals'][dep] = self.lookupcache[fn][dep]
-
- if runtime:
- data['runtaskdeps'] = self.runtaskdeps[k]
- data['runtaskhashes'] = {}
- for dep in data['runtaskdeps']:
- data['runtaskhashes'][dep] = self.taskhash[dep]
-
- p = pickle.Pickler(file(sigfile, "wb"), -1)
- p.dump(data)
-
- def dump_sigs(self, dataCache):
- for fn in self.taskdeps:
- for task in self.taskdeps[fn]:
- k = fn + "." + task
- if k not in self.taskhash:
- continue
- if dataCache.basetaskhash[k] != self.basehash[k]:
- bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
- bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
- self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
-
-class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
- name = "basichash"
-
- def stampfile(self, stampbase, fn, taskname, extrainfo):
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
- k = fn + "." + taskname[:-9]
- else:
- k = fn + "." + taskname
- h = self.taskhash[k]
- return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
-
-def dump_this_task(outfile, d):
- import bb.parse
- fn = d.getVar("BB_FILENAME", True)
- task = "do_" + d.getVar("BB_CURRENTTASK", True)
- bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")
-
-def compare_sigfiles(a, b):
- p1 = pickle.Unpickler(file(a, "rb"))
- a_data = p1.load()
- p2 = pickle.Unpickler(file(b, "rb"))
- b_data = p2.load()
-
- def dict_diff(a, b):
- sa = set(a.keys())
- sb = set(b.keys())
- common = sa & sb
- changed = set()
- for i in common:
- if a[i] != b[i]:
- changed.add(i)
- added = sa - sb
- removed = sb - sa
- return changed, added, removed
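-
- # e.g. dict_diff({"a": 1, "b": 2}, {"a": 1, "b": 3, "c": 4})
- # returns (set(['b']), set([]), set(['c'])) for
- # (changed, added, removed).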
-
- if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
- print "basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist'])
-
- if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
- print "taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist'])
-
- if a_data['taskdeps'] != b_data['taskdeps']:
- print "Task dependencies changed from %s to %s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps']))
-
- if a_data['basehash'] != b_data['basehash']:
- print "basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash'])
-
- changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'])
- if changed:
- for dep in changed:
- print "List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep])
- if added:
- for dep in added:
- print "Dependency on variable %s was added" % (dep)
- if removed:
- for dep in removed:
- print "Dependency on Variable %s was removed" % (dep)
-
-
- changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
- if changed:
- for dep in changed:
- print "Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep])
-
- if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
- changed, added, removed = dict_diff(a_data['runtaskhashes'], b_data['runtaskhashes'])
- if added:
- for dep in added:
- print "Dependency on task %s was added" % (dep)
- if removed:
- for dep in removed:
- print "Dependency on task %s was removed" % (dep)
- if changed:
- for dep in changed:
- print "Hash for dependent task %s changed from %s to %s" % (dep, a_data['runtaskhashes'][dep], b_data['runtaskhashes'][dep])
- elif 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
- print "Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps']))
-
-def dump_sigfile(a):
- p1 = pickle.Unpickler(file(a, "rb"))
- a_data = p1.load()
-
- print "basewhitelist: %s" % (a_data['basewhitelist'])
-
- print "taskwhitelist: %s" % (a_data['taskwhitelist'])
-
- print "Task dependencies: %s" % (sorted(a_data['taskdeps']))
-
- print "basehash: %s" % (a_data['basehash'])
-
- for dep in a_data['gendeps']:
- print "List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep])
-
- for dep in a_data['varvals']:
- print "Variable %s value is %s" % (dep, a_data['varvals'][dep])
-
- if 'runtaskdeps' in a_data:
- print "Tasks this task depends on: %s" % (a_data['runtaskdeps'])
-
- if 'runtaskhashes' in a_data:
- for dep in a_data['runtaskhashes']:
- print "Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep])
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
deleted file mode 100644
index 81a42b7b53..0000000000
--- a/bitbake/lib/bb/taskdata.py
+++ /dev/null
@@ -1,586 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'TaskData' implementation
-
-Task data collection and handling
-
-"""
-
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import logging
-import re
-import bb
-
-logger = logging.getLogger("BitBake.TaskData")
-
-def re_match_strings(target, strings):
- """
- Return whether the string 'target' matches any of the given
- strings, each of which may be a regular expression
- """
- return any(name == target or re.match(name, target)
- for name in strings)
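-
-# e.g. re_match_strings("glibc-initial", ["glibc.*"]) returns True,
-# while re_match_strings("glibc", ["uclibc"]) returns False
-# (illustrative target and pattern names).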
-
-class TaskData:
- """
- BitBake Task Data implementation
- """
- def __init__(self, abort = True, tryaltconfigs = False):
- self.build_names_index = []
- self.run_names_index = []
- self.fn_index = []
-
- self.build_targets = {}
- self.run_targets = {}
-
- self.external_targets = []
-
- self.tasks_fnid = []
- self.tasks_name = []
- self.tasks_tdepends = []
- self.tasks_idepends = []
- # Cache to speed up task ID lookups
- self.tasks_lookup = {}
-
- self.depids = {}
- self.rdepids = {}
-
- self.consider_msgs_cache = []
-
- self.failed_deps = []
- self.failed_rdeps = []
- self.failed_fnids = []
-
- self.abort = abort
- self.tryaltconfigs = tryaltconfigs
-
- def getbuild_id(self, name):
- """
- Return an ID number for the build target name.
- If it doesn't exist, create one.
- """
- if not name in self.build_names_index:
- self.build_names_index.append(name)
- return len(self.build_names_index) - 1
-
- return self.build_names_index.index(name)
-
- def getrun_id(self, name):
- """
- Return an ID number for the run target name.
- If it doesn't exist, create one.
- """
- if not name in self.run_names_index:
- self.run_names_index.append(name)
- return len(self.run_names_index) - 1
-
- return self.run_names_index.index(name)
-
- def getfn_id(self, name):
- """
- Return an ID number for the filename.
- If it doesn't exist, create one.
- """
- if not name in self.fn_index:
- self.fn_index.append(name)
- return len(self.fn_index) - 1
-
- return self.fn_index.index(name)
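-
- # These three methods implement a simple string-interning scheme,
- # e.g. (illustrative):
- #
- #   td = TaskData()
- #   td.getfn_id("foo.bb")   # -> 0 (created)
- #   td.getfn_id("foo.bb")   # -> 0 (looked up)
- #   td.fn_index[0]          # -> "foo.bb"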
-
- def gettask_ids(self, fnid):
- """
- Return an array of the ID numbers matching a given fnid.
- """
- ids = []
- if fnid in self.tasks_lookup:
- for task in self.tasks_lookup[fnid]:
- ids.append(self.tasks_lookup[fnid][task])
- return ids
-
- def gettask_id(self, fn, task, create = True):
- """
- Return an ID number for the task matching fn and task.
- If it doesn't exist, create one by default.
- Optionally return None instead.
- """
- fnid = self.getfn_id(fn)
-
- if fnid in self.tasks_lookup:
- if task in self.tasks_lookup[fnid]:
- return self.tasks_lookup[fnid][task]
-
- if not create:
- return None
-
- self.tasks_name.append(task)
- self.tasks_fnid.append(fnid)
- self.tasks_tdepends.append([])
- self.tasks_idepends.append([])
-
- listid = len(self.tasks_name) - 1
-
- if fnid not in self.tasks_lookup:
- self.tasks_lookup[fnid] = {}
- self.tasks_lookup[fnid][task] = listid
-
- return listid
-
- def add_tasks(self, fn, dataCache):
- """
- Add tasks for a given fn to the database
- """
-
- task_deps = dataCache.task_deps[fn]
-
- fnid = self.getfn_id(fn)
-
- if fnid in self.failed_fnids:
- bb.msg.fatal(bb.msg.domain.TaskData, "Trying to re-add a failed file? Something is broken...")
-
- # Check if we've already seen this fn
- if fnid in self.tasks_fnid:
- return
-
- for task in task_deps['tasks']:
-
- # Work out task dependencies
- parentids = []
- for dep in task_deps['parents'][task]:
- parentid = self.gettask_id(fn, dep)
- parentids.append(parentid)
- taskid = self.gettask_id(fn, task)
- self.tasks_tdepends[taskid].extend(parentids)
-
- # Touch all intertask dependencies
- if 'depends' in task_deps and task in task_deps['depends']:
- ids = []
- for dep in task_deps['depends'][task].split():
- if dep:
- if ":" not in dep:
- bb.msg.fatal(bb.msg.domain.TaskData, "Error, dependency %s in '%s' does not contain ':' character.\nTask 'depends' should be specified in the form 'packagename:task'" % (dep, fn))
- ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
- self.tasks_idepends[taskid].extend(ids)
-
- # Work out build dependencies
- if not fnid in self.depids:
- dependids = {}
- for depend in dataCache.deps[fn]:
- logger.debug(2, "Added dependency %s for %s", depend, fn)
- dependids[self.getbuild_id(depend)] = None
- self.depids[fnid] = dependids.keys()
-
- # Work out runtime dependencies
- if not fnid in self.rdepids:
- rdependids = {}
- rdepends = dataCache.rundeps[fn]
- rrecs = dataCache.runrecs[fn]
- for package in rdepends:
- for rdepend in rdepends[package]:
- logger.debug(2, "Added runtime dependency %s for %s", rdepend, fn)
- rdependids[self.getrun_id(rdepend)] = None
- for package in rrecs:
- for rdepend in rrecs[package]:
- logger.debug(2, "Added runtime recommendation %s for %s", rdepend, fn)
- rdependids[self.getrun_id(rdepend)] = None
- self.rdepids[fnid] = rdependids.keys()
-
- for dep in self.depids[fnid]:
- if dep in self.failed_deps:
- self.fail_fnid(fnid)
- return
- for dep in self.rdepids[fnid]:
- if dep in self.failed_rdeps:
- self.fail_fnid(fnid)
- return
-
- def have_build_target(self, target):
- """
- Have we a build target matching this name?
- """
- targetid = self.getbuild_id(target)
-
- if targetid in self.build_targets:
- return True
- return False
-
- def have_runtime_target(self, target):
- """
- Have we a runtime target matching this name?
- """
- targetid = self.getrun_id(target)
-
- if targetid in self.run_targets:
- return True
- return False
-
- def add_build_target(self, fn, item):
- """
- Add a build target.
- If already present, append the provider fn to the list
- """
- targetid = self.getbuild_id(item)
- fnid = self.getfn_id(fn)
-
- if targetid in self.build_targets:
- if fnid in self.build_targets[targetid]:
- return
- self.build_targets[targetid].append(fnid)
- return
- self.build_targets[targetid] = [fnid]
-
- def add_runtime_target(self, fn, item):
- """
- Add a runtime target.
- If already present, append the provider fn to the list
- """
- targetid = self.getrun_id(item)
- fnid = self.getfn_id(fn)
-
- if targetid in self.run_targets:
- if fnid in self.run_targets[targetid]:
- return
- self.run_targets[targetid].append(fnid)
- return
- self.run_targets[targetid] = [fnid]
-
- def mark_external_target(self, item):
- """
- Mark a build target as being externally requested
- """
- targetid = self.getbuild_id(item)
-
- if targetid not in self.external_targets:
- self.external_targets.append(targetid)
-
- def get_unresolved_build_targets(self, dataCache):
- """
- Return a list of build targets whose providers
- are unknown.
- """
- unresolved = []
- for target in self.build_names_index:
- if re_match_strings(target, dataCache.ignored_dependencies):
- continue
- if self.build_names_index.index(target) in self.failed_deps:
- continue
- if not self.have_build_target(target):
- unresolved.append(target)
- return unresolved
-
- def get_unresolved_run_targets(self, dataCache):
- """
- Return a list of runtime targets whose providers
- are unknown.
- """
- unresolved = []
- for target in self.run_names_index:
- if re_match_strings(target, dataCache.ignored_dependencies):
- continue
- if self.run_names_index.index(target) in self.failed_rdeps:
- continue
- if not self.have_runtime_target(target):
- unresolved.append(target)
- return unresolved
-
- def get_provider(self, item):
- """
- Return a list of providers of item
- """
- targetid = self.getbuild_id(item)
-
- return self.build_targets[targetid]
-
- def get_dependees(self, itemid):
- """
- Return a list of targets which depend on item
- """
- dependees = []
- for fnid in self.depids:
- if itemid in self.depids[fnid]:
- dependees.append(fnid)
- return dependees
-
- def get_dependees_str(self, item):
- """
- Return a list of targets (by filename) which depend on item
- """
- itemid = self.getbuild_id(item)
- dependees = []
- for fnid in self.depids:
- if itemid in self.depids[fnid]:
- dependees.append(self.fn_index[fnid])
- return dependees
-
- def get_rdependees(self, itemid):
- """
- Return a list of targets which depend on runtime item
- """
- dependees = []
- for fnid in self.rdepids:
- if itemid in self.rdepids[fnid]:
- dependees.append(fnid)
- return dependees
-
- def get_rdependees_str(self, item):
- """
- Return a list of targets (by filename) which depend on runtime item
- """
- itemid = self.getrun_id(item)
- dependees = []
- for fnid in self.rdepids:
- if itemid in self.rdepids[fnid]:
- dependees.append(self.fn_index[fnid])
- return dependees
-
- def add_provider(self, cfgData, dataCache, item):
- try:
- self.add_provider_internal(cfgData, dataCache, item)
- except bb.providers.NoProvider:
- if self.abort:
- raise
- self.remove_buildtarget(self.getbuild_id(item))
-
- self.mark_external_target(item)
-
- def add_provider_internal(self, cfgData, dataCache, item):
- """
- Add the providers of item to the task data
- Mark entries that were specifically added externally, as opposed to dependencies
- added internally during dependency resolution
- """
-
- if re_match_strings(item, dataCache.ignored_dependencies):
- return
-
- if not item in dataCache.providers:
- bb.event.fire(bb.event.NoProvider(item, dependees=self.get_rdependees_str(item)), cfgData)
- raise bb.providers.NoProvider(item)
-
- if self.have_build_target(item):
- return
-
- all_p = dataCache.providers[item]
-
- eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
- eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
-
- if not eligible:
- bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item)), cfgData)
- raise bb.providers.NoProvider(item)
-
- if len(eligible) > 1 and foundUnique == False:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData)
- self.consider_msgs_cache.append(item)
-
- for fn in eligible:
- fnid = self.getfn_id(fn)
- if fnid in self.failed_fnids:
- continue
- logger.debug(2, "adding %s to satisfy %s", fn, item)
- self.add_build_target(fn, item)
- self.add_tasks(fn, dataCache)
-
-
- #item = dataCache.pkg_fn[fn]
-
- def add_rprovider(self, cfgData, dataCache, item):
- """
- Add the runtime providers of item to the task data
- (takes item names from RDEPENDS/PACKAGES namespace)
- """
-
- if re_match_strings(item, dataCache.ignored_dependencies):
- return
-
- if self.have_runtime_target(item):
- return
-
- all_p = bb.providers.getRuntimeProviders(dataCache, item)
-
- if not all_p:
- bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item)), cfgData)
- raise bb.providers.NoRProvider(item)
-
- eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
- eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
-
- if not eligible:
- bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item)), cfgData)
- raise bb.providers.NoRProvider(item)
-
- if len(eligible) > 1 and numberPreferred == 0:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
- self.consider_msgs_cache.append(item)
-
- if numberPreferred > 1:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
- self.consider_msgs_cache.append(item)
-
- # run through the list until we find one that we can build
- for fn in eligible:
- fnid = self.getfn_id(fn)
- if fnid in self.failed_fnids:
- continue
- logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
- self.add_runtime_target(fn, item)
- self.add_tasks(fn, dataCache)
-
- def fail_fnid(self, fnid, missing_list = []):
- """
- Mark a file as failed (unbuildable)
- Remove any references from build and runtime provider lists
-
- missing_list, A list of missing requirements for this target
- """
- if fnid in self.failed_fnids:
- return
- logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
- self.failed_fnids.append(fnid)
- for target in self.build_targets:
- if fnid in self.build_targets[target]:
- self.build_targets[target].remove(fnid)
- if len(self.build_targets[target]) == 0:
- self.remove_buildtarget(target, missing_list)
- for target in self.run_targets:
- if fnid in self.run_targets[target]:
- self.run_targets[target].remove(fnid)
- if len(self.run_targets[target]) == 0:
- self.remove_runtarget(target, missing_list)
-
- def remove_buildtarget(self, targetid, missing_list = []):
- """
- Mark a build target as failed (unbuildable)
- Trigger removal of any files that have this as a dependency
- """
- if not missing_list:
- missing_list = [self.build_names_index[targetid]]
- else:
- missing_list = [self.build_names_index[targetid]] + missing_list
- logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
- self.failed_deps.append(targetid)
- dependees = self.get_dependees(targetid)
- for fnid in dependees:
- self.fail_fnid(fnid, missing_list)
- for taskid in xrange(len(self.tasks_idepends)):
- idepends = self.tasks_idepends[taskid]
- for (idependid, idependtask) in idepends:
- if idependid == targetid:
- self.fail_fnid(self.tasks_fnid[taskid], missing_list)
-
- if self.abort and targetid in self.external_targets:
- target = self.build_names_index[targetid]
- logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
- raise bb.providers.NoProvider(target)
-
- def remove_runtarget(self, targetid, missing_list = []):
- """
- Mark a run target as failed (unbuildable)
- Trigger removal of any files that have this as a dependency
- """
-        missing_list = [self.run_names_index[targetid]] + missing_list
-
- logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
- self.failed_rdeps.append(targetid)
- dependees = self.get_rdependees(targetid)
- for fnid in dependees:
- self.fail_fnid(fnid, missing_list)
-
- def add_unresolved(self, cfgData, dataCache):
- """
- Resolve all unresolved build and runtime targets
- """
- logger.info("Resolving any missing task queue dependencies")
- while True:
- added = 0
- for target in self.get_unresolved_build_targets(dataCache):
- try:
- self.add_provider_internal(cfgData, dataCache, target)
- added = added + 1
- except bb.providers.NoProvider:
- targetid = self.getbuild_id(target)
- if self.abort and targetid in self.external_targets:
- raise
- self.remove_buildtarget(targetid)
- for target in self.get_unresolved_run_targets(dataCache):
- try:
- self.add_rprovider(cfgData, dataCache, target)
- added = added + 1
- except bb.providers.NoRProvider:
- self.remove_runtarget(self.getrun_id(target))
- logger.debug(1, "Resolved " + str(added) + " extra dependencies")
- if added == 0:
- break
- # self.dump_data()
-
- def dump_data(self):
- """
- Dump some debug information on the internal data structures
- """
- logger.debug(3, "build_names:")
- logger.debug(3, ", ".join(self.build_names_index))
-
- logger.debug(3, "run_names:")
- logger.debug(3, ", ".join(self.run_names_index))
-
- logger.debug(3, "build_targets:")
- for buildid in xrange(len(self.build_names_index)):
- target = self.build_names_index[buildid]
- targets = "None"
- if buildid in self.build_targets:
- targets = self.build_targets[buildid]
- logger.debug(3, " (%s)%s: %s", buildid, target, targets)
-
- logger.debug(3, "run_targets:")
- for runid in xrange(len(self.run_names_index)):
- target = self.run_names_index[runid]
- targets = "None"
- if runid in self.run_targets:
- targets = self.run_targets[runid]
- logger.debug(3, " (%s)%s: %s", runid, target, targets)
-
- logger.debug(3, "tasks:")
- for task in xrange(len(self.tasks_name)):
- logger.debug(3, " (%s)%s - %s: %s",
- task,
- self.fn_index[self.tasks_fnid[task]],
- self.tasks_name[task],
- self.tasks_tdepends[task])
-
- logger.debug(3, "dependency ids (per fn):")
- for fnid in self.depids:
- logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
-
- logger.debug(3, "runtime dependency ids (per fn):")
- for fnid in self.rdepids:
- logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
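The add_unresolved() loop above is a fixed-point iteration: each pass tries to attach providers to outstanding targets, resolving one target can surface new unresolved dependencies, and the loop stops once a full pass adds nothing. A minimal standalone sketch of the same pattern, using hypothetical data rather than BitBake's real structures:

```python
# Fixed-point dependency resolution sketch (hypothetical data and names).
providers = {
    "app":    {"file": "app_1.0.bb",    "depends": ["libfoo"]},
    "libfoo": {"file": "libfoo_2.0.bb", "depends": []},
}

unresolved = {"app", "ghost"}   # "ghost" has no provider
resolved = {}
while True:
    added = 0
    for target in sorted(unresolved):
        info = providers.get(target)
        if info is None:
            continue                      # stays unresolved (unbuildable)
        resolved[target] = info["file"]
        unresolved.remove(target)
        # Resolving a target can introduce new unresolved dependencies
        unresolved.update(d for d in info["depends"] if d not in resolved)
        added += 1
    if added == 0:                        # fixed point: nothing new resolved
        break

print(resolved)    # {'app': 'app_1.0.bb', 'libfoo': 'libfoo_2.0.bb'}
print(unresolved)  # set(['ghost']) -- would be removed as unbuildable
```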
diff --git a/bitbake/lib/bb/ui/__init__.py b/bitbake/lib/bb/ui/__init__.py
deleted file mode 100644
index a4805ed028..0000000000
--- a/bitbake/lib/bb/ui/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# BitBake UI Implementation
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/bitbake/lib/bb/ui/crumbs/__init__.py b/bitbake/lib/bb/ui/crumbs/__init__.py
deleted file mode 100644
index a4805ed028..0000000000
--- a/bitbake/lib/bb/ui/crumbs/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# BitBake UI Implementation
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/bitbake/lib/bb/ui/crumbs/buildmanager.py b/bitbake/lib/bb/ui/crumbs/buildmanager.py
deleted file mode 100644
index e858d75e4c..0000000000
--- a/bitbake/lib/bb/ui/crumbs/buildmanager.py
+++ /dev/null
@@ -1,455 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import threading
-import os
-import datetime
-import time
-
-class BuildConfiguration:
- """ Represents a potential *or* historic *or* concrete build. It
- encompasses all the things that we need to tell bitbake to do to make it
- build what we want it to build.
-
-    It also stores the metadata URL and the set of possible machines (and the
-    distros / images / uris for these). Apart from the metadata URL these are
-    not serialised to file (since they may be transient). In some ways this
-    functionality might be shifted to the loader class."""
-
- def __init__ (self):
- self.metadata_url = None
-
- # Tuple of (distros, image, urls)
- self.machine_options = {}
-
- self.machine = None
- self.distro = None
- self.image = None
- self.urls = []
- self.extra_urls = []
- self.extra_pkgs = []
-
- def get_machines_model (self):
- model = gtk.ListStore (gobject.TYPE_STRING)
- for machine in self.machine_options.keys():
- model.append ([machine])
-
- return model
-
- def get_distro_and_images_models (self, machine):
- distro_model = gtk.ListStore (gobject.TYPE_STRING)
-
- for distro in self.machine_options[machine][0]:
- distro_model.append ([distro])
-
- image_model = gtk.ListStore (gobject.TYPE_STRING)
-
- for image in self.machine_options[machine][1]:
- image_model.append ([image])
-
- return (distro_model, image_model)
-
- def get_repos (self):
- self.urls = self.machine_options[self.machine][2]
- return self.urls
-
-    # It might be a lot better if we stored these in something like the
-    # bitbake conf file format.
- @staticmethod
- def load_from_file (filename):
-
- conf = BuildConfiguration()
- with open(filename, "r") as f:
- for line in f:
- data = line.split (";")[1]
- if (line.startswith ("metadata-url;")):
- conf.metadata_url = data.strip()
- continue
- if (line.startswith ("url;")):
- conf.urls += [data.strip()]
- continue
- if (line.startswith ("extra-url;")):
- conf.extra_urls += [data.strip()]
- continue
- if (line.startswith ("machine;")):
- conf.machine = data.strip()
- continue
- if (line.startswith ("distribution;")):
- conf.distro = data.strip()
- continue
- if (line.startswith ("image;")):
- conf.image = data.strip()
- continue
-
- return conf
-
- # Serialise to a file. This is part of the build process and we use this
- # to be able to repeat a given build (using the same set of parameters)
- # but also so that we can include the details of the image / machine /
- # distro in the build manager tree view.
-    def write_to_file (self, filename):
-        lines = []
-
-        if (self.metadata_url):
-            lines += ["metadata-url;%s\n" % (self.metadata_url)]
-
-        for url in self.urls:
-            lines += ["url;%s\n" % (url)]
-
-        for url in self.extra_urls:
-            lines += ["extra-url;%s\n" % (url)]
-
-        if (self.machine):
-            lines += ["machine;%s\n" % (self.machine)]
-
-        if (self.distro):
-            lines += ["distribution;%s\n" % (self.distro)]
-
-        if (self.image):
-            lines += ["image;%s\n" % (self.image)]
-
-        with open (filename, "w") as f:
-            f.writelines (lines)
-
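For reference, a round-trip sketch of the semicolon-delimited format these two methods read and write. The field names come from the code above; the values and paths are invented:

```python
# Hypothetical ".conf" contents: one "key;value" pair per line.
sample = """\
metadata-url;http://example.com/metadata.git
url;http://example.com/repo1
extra-url;http://example.com/extra-feed
machine;qemux86
distribution;mydistro
image;my-image
"""

with open("/tmp/example.conf", "w") as f:
    f.write(sample)

conf = BuildConfiguration.load_from_file("/tmp/example.conf")
print(conf.machine, conf.image)        # prints a tuple under Python 2
conf.write_to_file("/tmp/roundtrip.conf")
```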
-class BuildResult(gobject.GObject):
- """ Represents an historic build. Perhaps not successful. But it includes
- things such as the files that are in the directory (the output from the
- build) as well as a deserialised BuildConfiguration file that is stored in
- ".conf" in the directory for the build.
-
- This is GObject so that it can be included in the TreeStore."""
-
- (STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \
- (0, 1, 2)
-
- def __init__ (self, parent, identifier):
- gobject.GObject.__init__ (self)
- self.date = None
-
- self.files = []
- self.status = None
- self.identifier = identifier
- self.path = os.path.join (parent, identifier)
-
-        # Extract the date: since the directory name is of the
-        # format build-<year><month><day>-<ordinal>, we can easily
-        # pull it out.
-        # TODO: Better to stat a file?
-        (_, date, revision) = identifier.split ("-")
-
- year = int (date[0:4])
- month = int (date[4:6])
- day = int (date[6:8])
-
- self.date = datetime.date (year, month, day)
-
- self.conf = None
-
- # By default builds are STATE_FAILED unless we find a "complete" file
- # in which case they are STATE_COMPLETE
- self.state = BuildResult.STATE_FAILED
- for file in os.listdir (self.path):
- if (file.startswith (".conf")):
- conffile = os.path.join (self.path, file)
- self.conf = BuildConfiguration.load_from_file (conffile)
- elif (file.startswith ("complete")):
- self.state = BuildResult.STATE_COMPLETE
- else:
- self.add_file (file)
-
- def add_file (self, file):
- # Just add the file for now. Don't care about the type.
- self.files += [(file, None)]
-
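To make the state logic concrete, a sketch that fabricates a throwaway results directory and loads it back. The layout conventions (a ".conf" file, a "complete" marker, everything else treated as build output) come from the constructor above; the paths are invented:

```python
import os

base = "/tmp/results"
ident = "build-20110223-0"
os.makedirs(os.path.join(base, ident))          # throwaway directory
for name in (".conf", "complete", "image.rootfs.tar.gz"):
    open(os.path.join(base, ident, name), "w").close()

result = BuildResult(base, ident)
print(result.date)                                   # 2011-02-23
print(result.state == BuildResult.STATE_COMPLETE)    # True
print(result.files)                                  # [('image.rootfs.tar.gz', None)]
```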
-class BuildManagerModel (gtk.TreeStore):
- """ Model for the BuildManagerTreeView. This derives from gtk.TreeStore
- but it abstracts nicely what the columns mean and the setup of the columns
- in the model. """
-
- (COL_IDENT, COL_DESC, COL_MACHINE, COL_DISTRO, COL_BUILD_RESULT, COL_DATE, COL_STATE) = \
- (0, 1, 2, 3, 4, 5, 6)
-
- def __init__ (self):
- gtk.TreeStore.__init__ (self,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_OBJECT,
- gobject.TYPE_INT64,
- gobject.TYPE_INT)
-
-class BuildManager (gobject.GObject):
- """ This class manages the historic builds that have been found in the
- "results" directory but is also used for starting a new build."""
-
- __gsignals__ = {
- 'population-finished' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'populate-error' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ())
- }
-
- def update_build_result (self, result, iter):
- # Convert the date into something we can sort by.
- date = long (time.mktime (result.date.timetuple()))
-
- # Add a top level entry for the build
-
- self.model.set (iter,
- BuildManagerModel.COL_IDENT, result.identifier,
- BuildManagerModel.COL_DESC, result.conf.image,
- BuildManagerModel.COL_MACHINE, result.conf.machine,
- BuildManagerModel.COL_DISTRO, result.conf.distro,
- BuildManagerModel.COL_BUILD_RESULT, result,
- BuildManagerModel.COL_DATE, date,
- BuildManagerModel.COL_STATE, result.state)
-
- # And then we use the files in the directory as the children for the
- # top level iter.
- for file in result.files:
- self.model.append (iter, (None, file[0], None, None, None, date, -1))
-
-    # This function is called from the BuildManagerPopulaterThread
- def add_build_result (self, result):
- gtk.gdk.threads_enter()
- self.known_builds += [result]
-
- self.update_build_result (result, self.model.append (None))
-
- gtk.gdk.threads_leave()
-
- def notify_build_finished (self):
-        # This is a bit of a hack. If we have a build running then we will
-        # have a row in the model in STATE_ONGOING. Find it and mark it as
-        # if it were a proper historic build (well, it is completed now...)
-
- # We need to use the iters here rather than the Python iterator
- # interface to the model since we need to pass it into
- # update_build_result
-
- iter = self.model.get_iter_first()
-
- while (iter):
- (ident, state) = self.model.get(iter,
- BuildManagerModel.COL_IDENT,
- BuildManagerModel.COL_STATE)
-
- if state == BuildResult.STATE_ONGOING:
- result = BuildResult (self.results_directory, ident)
- self.update_build_result (result, iter)
- iter = self.model.iter_next(iter)
-
- def notify_build_succeeded (self):
- # Write the "complete" file so that when we create the BuildResult
- # object we put into the model
-
- complete_file_path = os.path.join (self.cur_build_directory, "complete")
-        f = open (complete_file_path, "w")
- f.close()
- self.notify_build_finished()
-
- def notify_build_failed (self):
- # Without a "complete" file then this will mark the build as failed:
- self.notify_build_finished()
-
- # This function is called as an idle
- def emit_population_finished_signal (self):
- gtk.gdk.threads_enter()
- self.emit ("population-finished")
- gtk.gdk.threads_leave()
-
- class BuildManagerPopulaterThread (threading.Thread):
- def __init__ (self, manager, directory):
- threading.Thread.__init__ (self)
- self.manager = manager
- self.directory = directory
-
- def run (self):
- # For each of the "build-<...>" directories ..
-
- if os.path.exists (self.directory):
- for directory in os.listdir (self.directory):
-
- if not directory.startswith ("build-"):
- continue
-
- build_result = BuildResult (self.directory, directory)
- self.manager.add_build_result (build_result)
-
- gobject.idle_add (BuildManager.emit_population_finished_signal,
- self.manager)
-
- def __init__ (self, server, results_directory):
- gobject.GObject.__init__ (self)
-
- # The builds that we've found from walking the result directory
- self.known_builds = []
-
- # Save out the bitbake server, we need this for issuing commands to
- # the cooker:
- self.server = server
-
- # The TreeStore that we use
- self.model = BuildManagerModel ()
-
- # The results directory is where we create (and look for) the
- # build-<xyz>-<n> directories. We need to populate ourselves from
-        # this directory.
- self.results_directory = results_directory
- self.populate_from_directory (self.results_directory)
-
- def populate_from_directory (self, directory):
- thread = BuildManager.BuildManagerPopulaterThread (self, directory)
- thread.start()
-
- # Come up with the name for the next build ident by combining "build-"
-    # with the date formatted as yyyymmdd and then an ordinal. We do this
-    # with an optimistic algorithm, incrementing the ordinal if we find
-    # that it already exists.
- def get_next_build_ident (self):
- today = datetime.date.today ()
-        # Zero-padded so the fixed-width slicing in BuildResult keeps working
-        datestr = today.strftime ("%Y%m%d")
-
- revision = 0
- test_name = "build-%s-%d" % (datestr, revision)
- test_path = os.path.join (self.results_directory, test_name)
-
- while (os.path.exists (test_path)):
- revision += 1
- test_name = "build-%s-%d" % (datestr, revision)
- test_path = os.path.join (self.results_directory, test_name)
-
- return test_name
-
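The zero padding matters because BuildResult slices the date out of the directory name at fixed offsets; a quick illustration of the difference:

```python
import datetime

d = datetime.date(2011, 2, 3)
print(d.strftime("%Y%m%d"))                      # 20110203 -- fixed width
print(str(d.year) + str(d.month) + str(d.day))   # 201123 -- not sliceable
```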
-    # Take a BuildConfiguration and then try to build it based on the
-    # parameters of that configuration.
- def do_build (self, conf):
- server = self.server
-
- # Work out the build directory. Note we actually create the
- # directories here since we need to write the ".conf" file. Otherwise
- # we could have relied on bitbake's builder thread to actually make
- # the directories as it proceeds with the build.
- ident = self.get_next_build_ident ()
- build_directory = os.path.join (self.results_directory,
- ident)
- self.cur_build_directory = build_directory
- os.makedirs (build_directory)
-
- conffile = os.path.join (build_directory, ".conf")
- conf.write_to_file (conffile)
-
-        # Add a row to the model representing this ongoing build. It is
-        # something of a placeholder entry: if this build completes or fails
-        # it gets updated with the real data, like the historic builds
- date = long (time.time())
- self.model.append (None, (ident, conf.image, conf.machine, conf.distro,
- None, date, BuildResult.STATE_ONGOING))
- try:
- server.runCommand(["setVariable", "BUILD_IMAGES_FROM_FEEDS", 1])
- server.runCommand(["setVariable", "MACHINE", conf.machine])
- server.runCommand(["setVariable", "DISTRO", conf.distro])
- server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_ipk"])
- server.runCommand(["setVariable", "BBFILES", \
- """${OEROOT}/meta/packages/*/*.bb ${OEROOT}/meta-moblin/packages/*/*.bb"""])
- server.runCommand(["setVariable", "TMPDIR", "${OEROOT}/build/tmp"])
- server.runCommand(["setVariable", "IPK_FEED_URIS", \
- " ".join(conf.get_repos())])
- server.runCommand(["setVariable", "DEPLOY_DIR_IMAGE",
- build_directory])
- server.runCommand(["buildTargets", [conf.image], "rootfs"])
-
- except Exception as e:
- print(e)
-
-class BuildManagerTreeView (gtk.TreeView):
- """ The tree view for the build manager. This shows the historic builds
- and so forth. """
-
- # We use this function to control what goes in the cell since we store
- # the date in the model as seconds since the epoch (for sorting) and so we
- # need to make it human readable.
- def date_format_custom_cell_data_func (self, col, cell, model, iter):
- date = model.get (iter, BuildManagerModel.COL_DATE)[0]
- datestr = time.strftime("%A %d %B %Y", time.localtime(date))
- cell.set_property ("text", datestr)
-
- # This format function controls what goes in the cell. We use this to map
- # the integer state to a string and also to colourise the text
- def state_format_custom_cell_data_fun (self, col, cell, model, iter):
- state = model.get (iter, BuildManagerModel.COL_STATE)[0]
-
- if (state == BuildResult.STATE_ONGOING):
- cell.set_property ("text", "Active")
- cell.set_property ("foreground", "#000000")
- elif (state == BuildResult.STATE_FAILED):
- cell.set_property ("text", "Failed")
- cell.set_property ("foreground", "#ff0000")
- elif (state == BuildResult.STATE_COMPLETE):
- cell.set_property ("text", "Complete")
- cell.set_property ("foreground", "#00ff00")
- else:
- cell.set_property ("text", "")
-
- def __init__ (self):
- gtk.TreeView.__init__(self)
-
- # Misc descriptiony thing
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn (None, renderer,
- text=BuildManagerModel.COL_DESC)
- self.append_column (col)
-
- # Machine
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Machine", renderer,
- text=BuildManagerModel.COL_MACHINE)
- self.append_column (col)
-
- # distro
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Distribution", renderer,
- text=BuildManagerModel.COL_DISTRO)
- self.append_column (col)
-
- # date (using a custom function for formatting the cell contents it
- # takes epoch -> human readable string)
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Date", renderer,
- text=BuildManagerModel.COL_DATE)
- self.append_column (col)
- col.set_cell_data_func (renderer,
- self.date_format_custom_cell_data_func)
-
- # For status.
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Status", renderer,
- text = BuildManagerModel.COL_STATE)
- self.append_column (col)
- col.set_cell_data_func (renderer,
- self.state_format_custom_cell_data_fun)
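A sketch of wiring the pieces above together into a window. Passing None for the server is an assumption made for illustration; only do_build() actually needs a live BitBake server proxy:

```python
import gobject
import gtk

gobject.threads_init()
gtk.gdk.threads_init()   # the populater thread touches the GTK model

manager = BuildManager(None, "/tmp/results")   # server=None: browse only
view = BuildManagerTreeView()
view.set_model(manager.model)

window = gtk.Window()
window.connect("destroy", gtk.main_quit)
window.add(view)
window.show_all()

manager.connect("population-finished", lambda m: view.expand_all())
gtk.main()
```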
diff --git a/bitbake/lib/bb/ui/crumbs/progress.py b/bitbake/lib/bb/ui/crumbs/progress.py
deleted file mode 100644
index 36eca38294..0000000000
--- a/bitbake/lib/bb/ui/crumbs/progress.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import gtk
-
-class ProgressBar(gtk.Dialog):
- def __init__(self, parent):
-
- gtk.Dialog.__init__(self)
- self.set_title("Parsing metadata, please wait...")
- self.set_default_size(500, 0)
- self.set_transient_for(parent)
- self.set_destroy_with_parent(True)
- self.progress = gtk.ProgressBar()
- self.vbox.pack_start(self.progress)
- self.show_all()
-
- def update(self, x, y):
- self.progress.set_fraction(float(x)/float(y))
- self.progress.set_text("%2d %%" % (x*100/y))
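A minimal usage sketch, assuming some parent gtk.Window already exists:

```python
parent = gtk.Window()
pbar = ProgressBar(parent)
pbar.update(30, 120)   # 30 of 120 done: fraction 0.25, text "25 %"
```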
diff --git a/bitbake/lib/bb/ui/crumbs/puccho.glade b/bitbake/lib/bb/ui/crumbs/puccho.glade
deleted file mode 100644
index d7553a6e14..0000000000
--- a/bitbake/lib/bb/ui/crumbs/puccho.glade
+++ /dev/null
@@ -1,606 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
-<!--Generated with glade3 3.4.5 on Mon Nov 10 12:24:12 2008 -->
-<glade-interface>
- <widget class="GtkDialog" id="build_dialog">
- <property name="title" translatable="yes">Start a build</property>
- <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
- <property name="has_separator">False</property>
- <child internal-child="vbox">
- <widget class="GtkVBox" id="dialog-vbox1">
- <property name="visible">True</property>
- <property name="spacing">2</property>
- <child>
- <widget class="GtkTable" id="build_table">
- <property name="visible">True</property>
- <property name="border_width">6</property>
- <property name="n_rows">7</property>
- <property name="n_columns">3</property>
- <property name="column_spacing">5</property>
- <property name="row_spacing">6</property>
- <child>
- <widget class="GtkAlignment" id="status_alignment">
- <property name="visible">True</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkHBox" id="status_hbox">
- <property name="spacing">6</property>
- <child>
- <widget class="GtkImage" id="status_image">
- <property name="visible">True</property>
- <property name="no_show_all">True</property>
- <property name="xalign">0</property>
- <property name="stock">gtk-dialog-error</property>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="status_label">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">If you see this text something is wrong...</property>
- <property name="use_markup">True</property>
- <property name="use_underline">True</property>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">2</property>
- <property name="bottom_attach">3</property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label2">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Build configuration&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="image_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="image_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Image:</property>
- </widget>
- <packing>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="distribution_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="distribution_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Distribution:</property>
- </widget>
- <packing>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="machine_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="machine_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Machine:</property>
- </widget>
- <packing>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkButton" id="refresh_button">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-refresh</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="location_entry">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="width_chars">32</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label3">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Location:</property>
- </widget>
- <packing>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label1">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Repository&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment1">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment2">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment3">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child internal-child="action_area">
- <widget class="GtkHButtonBox" id="dialog-action_area1">
- <property name="visible">True</property>
- <property name="layout_style">GTK_BUTTONBOX_END</property>
- <child>
- <placeholder/>
- </child>
- <child>
- <placeholder/>
- </child>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="pack_type">GTK_PACK_END</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <widget class="GtkDialog" id="dialog2">
- <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
- <property name="has_separator">False</property>
- <child internal-child="vbox">
- <widget class="GtkVBox" id="dialog-vbox2">
- <property name="visible">True</property>
- <property name="spacing">2</property>
- <child>
- <widget class="GtkTable" id="table2">
- <property name="visible">True</property>
- <property name="border_width">6</property>
- <property name="n_rows">7</property>
- <property name="n_columns">3</property>
- <property name="column_spacing">6</property>
- <property name="row_spacing">6</property>
- <child>
- <widget class="GtkLabel" id="label7">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Repositories&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment4">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkScrolledWindow" id="scrolledwindow1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <widget class="GtkTreeView" id="treeview1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="headers_clickable">True</property>
- </widget>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">2</property>
- <property name="bottom_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="entry1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label9">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Additional packages&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment6">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="xscale">0</property>
- <child>
- <widget class="GtkLabel" id="label8">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="yalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Location: </property>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment7">
- <property name="visible">True</property>
- <property name="xalign">1</property>
- <property name="xscale">0</property>
- <child>
- <widget class="GtkHButtonBox" id="hbuttonbox1">
- <property name="visible">True</property>
- <property name="spacing">5</property>
- <child>
- <widget class="GtkButton" id="button7">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-remove</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- </child>
- <child>
- <widget class="GtkButton" id="button6">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-edit</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child>
- <widget class="GtkButton" id="button5">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-add</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="position">2</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment5">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label10">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="yalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Search:</property>
- </widget>
- <packing>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="entry2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment8">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkScrolledWindow" id="scrolledwindow2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <widget class="GtkTreeView" id="treeview2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="headers_clickable">True</property>
- </widget>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child internal-child="action_area">
- <widget class="GtkHButtonBox" id="dialog-action_area2">
- <property name="visible">True</property>
- <property name="layout_style">GTK_BUTTONBOX_END</property>
- <child>
- <widget class="GtkButton" id="button4">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-close</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="pack_type">GTK_PACK_END</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <widget class="GtkWindow" id="main_window">
- <child>
- <widget class="GtkVBox" id="main_window_vbox">
- <property name="visible">True</property>
- <child>
- <widget class="GtkToolbar" id="main_toolbar">
- <property name="visible">True</property>
- <child>
- <widget class="GtkToolButton" id="main_toolbutton_build">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Build</property>
- <property name="stock_id">gtk-execute</property>
- </widget>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- <child>
- <widget class="GtkVPaned" id="vpaned1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <child>
- <widget class="GtkScrolledWindow" id="results_scrolledwindow">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="resize">False</property>
- <property name="shrink">True</property>
- </packing>
- </child>
- <child>
- <widget class="GtkScrolledWindow" id="progress_scrolledwindow">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="resize">True</property>
- <property name="shrink">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
-</glade-interface>
diff --git a/bitbake/lib/bb/ui/crumbs/runningbuild.py b/bitbake/lib/bb/ui/crumbs/runningbuild.py
deleted file mode 100644
index 4703e6d844..0000000000
--- a/bitbake/lib/bb/ui/crumbs/runningbuild.py
+++ /dev/null
@@ -1,311 +0,0 @@
-
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import logging
-import os
-import time
-import urllib
-import urllib2
-
-import bb.build
-import bb.event
-
-class Colors(object):
- OK = "#ffffff"
- RUNNING = "#aaffaa"
- WARNING ="#f88017"
- ERROR = "#ffaaaa"
-
-class RunningBuildModel (gtk.TreeStore):
- (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
-
- def __init__ (self):
- gtk.TreeStore.__init__ (self,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_INT)
-
-class RunningBuild (gobject.GObject):
- __gsignals__ = {
- 'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-failed' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ())
- }
-
-    def __init__ (self):
-        gobject.GObject.__init__ (self)
-        self.model = RunningBuildModel()
-        # Keep these per-instance; as class attributes the dicts would be
-        # shared between every RunningBuild object
-        self.pids_to_task = {}    # pid -> (package, task)
-        self.tasks_to_iter = {}   # (package, task) -> tree iter
-
- def handle_event (self, event, pbar=None):
-        # Handle an event from the event queue; this may result in updating
-        # the model and thus the UI, or it may tell us that the build
-        # has finished (successfully or not, as the case may be).
-
- parent = None
- pid = 0
- package = None
- task = None
-
- # If we have a pid attached to this message/event try and get the
- # (package, task) pair for it. If we get that then get the parent iter
- # for the message.
- if hasattr(event, 'pid'):
- pid = event.pid
- if hasattr(event, 'process'):
- pid = event.process
-
- if pid and pid in self.pids_to_task:
- (package, task) = self.pids_to_task[pid]
- parent = self.tasks_to_iter[(package, task)]
-
- if(isinstance(event, logging.LogRecord)):
- if (event.msg.startswith ("Running task")):
- return # don't add these to the list
-
- if event.levelno >= logging.ERROR:
- icon = "dialog-error"
- color = Colors.ERROR
- elif event.levelno >= logging.WARNING:
- icon = "dialog-warning"
- color = Colors.WARNING
- else:
- icon = None
- color = Colors.OK
-
- # if we know which package we belong to, we'll append onto its list.
- # otherwise, we'll jump to the top of the master list
- if parent:
- tree_add = self.model.append
- else:
- tree_add = self.model.prepend
- tree_add(parent,
- (None,
- package,
- task,
- event.getMessage(),
- icon,
- color,
- 0))
-
- elif isinstance(event, bb.build.TaskStarted):
- (package, task) = (event._package, event._task)
-
- # Save out this PID.
- self.pids_to_task[pid] = (package, task)
-
- # Check if we already have this package in our model. If so then
- # that can be the parent for the task. Otherwise we create a new
- # top level for the package.
- if ((package, None) in self.tasks_to_iter):
- parent = self.tasks_to_iter[(package, None)]
- else:
- parent = self.model.prepend(None, (None,
- package,
- None,
- "Package: %s" % (package),
- None,
- Colors.OK,
- 0))
- self.tasks_to_iter[(package, None)] = parent
-
- # Because this parent package now has an active child mark it as
- # such.
- # @todo if parent is already in error, don't mark it green
- self.model.set(parent, self.model.COL_ICON, "gtk-execute",
- self.model.COL_COLOR, Colors.RUNNING)
-
- # Add an entry in the model for this task
- i = self.model.append (parent, (None,
- package,
- task,
- "Task: %s" % (task),
- "gtk-execute",
- Colors.RUNNING,
- 0))
-
- # update the parent's active task count
- num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] + 1
- self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
-
- # Save out the iter so that we can find it when we have a message
- # that we need to attach to a task.
- self.tasks_to_iter[(package, task)] = i
-
- elif isinstance(event, bb.build.TaskBase):
- current = self.tasks_to_iter[(package, task)]
- parent = self.tasks_to_iter[(package, None)]
-
- # remove this task from the parent's active count
- num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] - 1
- self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
-
- if isinstance(event, bb.build.TaskFailed):
- # Mark the task and parent as failed
- icon = "dialog-error"
- color = Colors.ERROR
-
- logfile = event.logfile
- if logfile and os.path.exists(logfile):
- with open(logfile) as f:
- logdata = f.read()
- self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', Colors.OK, 0))
-
- for i in (current, parent):
- self.model.set(i, self.model.COL_ICON, icon,
- self.model.COL_COLOR, color)
- else:
- icon = None
- color = Colors.OK
-
- # Mark the task as inactive
- self.model.set(current, self.model.COL_ICON, icon,
- self.model.COL_COLOR, color)
-
- # Mark the parent package as inactive, but make sure to
- # preserve error and active states
- i = self.tasks_to_iter[(package, None)]
-            # model.get() returns a one-item tuple, so compare its first element
-            if self.model.get(parent, self.model.COL_ICON)[0] != 'dialog-error':
- self.model.set(parent, self.model.COL_ICON, icon)
- if num_active == 0:
- self.model.set(parent, self.model.COL_COLOR, Colors.OK)
-
- # Clear the iters and the pids since when the task goes away the
- # pid will no longer be used for messages
- del self.tasks_to_iter[(package, task)]
- del self.pids_to_task[pid]
-
- elif isinstance(event, bb.event.BuildStarted):
-
- self.model.prepend(None, (None,
- None,
- None,
- "Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
- None,
- Colors.OK,
- 0))
- elif isinstance(event, bb.event.BuildCompleted):
- failures = int (event._failures)
- self.model.prepend(None, (None,
- None,
- None,
- "Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
- None,
- Colors.OK,
- 0))
-
- # Emit the appropriate signal depending on the number of failures
- if (failures >= 1):
- self.emit ("build-failed")
- else:
- self.emit ("build-succeeded")
-
- elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
- pbar.set_title("Loading cache")
- self.progress_total = event.total
- pbar.update(0, self.progress_total)
- elif isinstance(event, bb.event.CacheLoadProgress) and pbar:
- pbar.update(event.current, self.progress_total)
- elif isinstance(event, bb.event.CacheLoadCompleted) and pbar:
- pbar.update(self.progress_total, self.progress_total)
-
- elif isinstance(event, bb.event.ParseStarted) and pbar:
- pbar.set_title("Processing recipes")
- self.progress_total = event.total
- pbar.update(0, self.progress_total)
- elif isinstance(event, bb.event.ParseProgress) and pbar:
- pbar.update(event.current, self.progress_total)
- elif isinstance(event, bb.event.ParseCompleted) and pbar:
- pbar.hide()
-
- return
-
-
-def do_pastebin(text):
- url = 'http://pastebin.com/api_public.php'
- params = {'paste_code': text, 'paste_format': 'text'}
-
- req = urllib2.Request(url, urllib.urlencode(params))
- response = urllib2.urlopen(req)
- paste_url = response.read()
-
- return paste_url
-
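Usage is a single call, though note that pastebin.com's api_public.php endpoint has since been retired, so treat this purely as an illustration:

```python
log_text = "ERROR: function do_compile failed"   # hypothetical log excerpt
paste_url = do_pastebin(log_text)
print(paste_url)
```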
-
-class RunningBuildTreeView (gtk.TreeView):
- __gsignals__ = {
- "button_press_event" : "override"
- }
- def __init__ (self):
- gtk.TreeView.__init__ (self)
-
- # The icon that indicates whether we're building or failed.
- renderer = gtk.CellRendererPixbuf ()
- col = gtk.TreeViewColumn ("Status", renderer)
- col.add_attribute (renderer, "icon-name", 4)
- self.append_column (col)
-
- # The message of the build.
- self.message_renderer = gtk.CellRendererText ()
- self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
- self.message_column.add_attribute(self.message_renderer, 'background', 5)
- self.message_renderer.set_property('editable', 5)
- self.append_column (self.message_column)
-
- def do_button_press_event(self, event):
- gtk.TreeView.do_button_press_event(self, event)
-
- if event.button == 3:
- selection = super(RunningBuildTreeView, self).get_selection()
- (model, iter) = selection.get_selected()
- if iter is not None:
- can_paste = model.get(iter, model.COL_LOG)[0]
- if can_paste == 'pastebin':
- # build a simple menu with a pastebin option
- menu = gtk.Menu()
- menuitem = gtk.MenuItem("Send log to pastebin")
- menu.append(menuitem)
- menuitem.connect("activate", self.pastebin_handler, (model, iter))
- menuitem.show()
- menu.show()
- menu.popup(None, None, None, event.button, event.time)
-
- def pastebin_handler(self, widget, data):
- """
- Send the log data to pastebin, then add the new paste url to the
- clipboard.
- """
- (model, iter) = data
- paste_url = do_pastebin(model.get(iter, model.COL_MESSAGE)[0])
-
- # @todo Provide visual feedback to the user that it is done and that
- # it worked.
-        print(paste_url)
-
- clipboard = gtk.clipboard_get()
- clipboard.set_text(paste_url)
-        clipboard.store()
\ No newline at end of file
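A sketch of how a front end would drive RunningBuild, modelled on the event loops in the other UIs in this tree; the eventHandler object is an assumption:

```python
build = RunningBuild()
view = RunningBuildTreeView()
view.set_model(build.model)

build.connect("build-succeeded", lambda b: gtk.main_quit())
build.connect("build-failed", lambda b: gtk.main_quit())

# Pump cooker events into the model (eventHandler is assumed to exist).
while True:
    event = eventHandler.waitEvent(0.25)
    if event is not None:
        build.handle_event(event)
```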
diff --git a/bitbake/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
deleted file mode 100644
index 3dbd5e0eca..0000000000
--- a/bitbake/lib/bb/ui/depexp.py
+++ /dev/null
@@ -1,307 +0,0 @@
-#
-# BitBake Graphical GTK based Dependency Explorer
-#
-# Copyright (C) 2007 Ross Burton
-# Copyright (C) 2007 - 2008 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gobject
-import gtk
-import Queue
-import threading
-import xmlrpclib
-import bb
-import bb.event
-from bb.ui.crumbs.progress import ProgressBar
-
-# Package Model
-(COL_PKG_NAME) = (0)
-
-# Dependency Model
-(TYPE_DEP, TYPE_RDEP) = (0, 1)
-(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
-
-
-class PackageDepView(gtk.TreeView):
- def __init__(self, model, dep_type, label):
- gtk.TreeView.__init__(self)
- self.current = None
- self.dep_type = dep_type
- self.filter_model = model.filter_new()
- self.filter_model.set_visible_func(self._filter)
- self.set_model(self.filter_model)
- #self.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PACKAGE))
-
- def _filter(self, model, iter):
- (this_type, package) = model.get(iter, COL_DEP_TYPE, COL_DEP_PARENT)
- if this_type != self.dep_type: return False
- return package == self.current
-
- def set_current_package(self, package):
- self.current = package
- self.filter_model.refilter()
-
-
-class PackageReverseDepView(gtk.TreeView):
- def __init__(self, model, label):
- gtk.TreeView.__init__(self)
- self.current = None
- self.filter_model = model.filter_new()
- self.filter_model.set_visible_func(self._filter)
- self.set_model(self.filter_model)
- self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PARENT))
-
- def _filter(self, model, iter):
- package = model.get_value(iter, COL_DEP_PACKAGE)
- return package == self.current
-
- def set_current_package(self, package):
- self.current = package
- self.filter_model.refilter()
-
-
-class DepExplorer(gtk.Window):
- def __init__(self):
- gtk.Window.__init__(self)
- self.set_title("Dependency Explorer")
- self.set_default_size(500, 500)
- self.connect("delete-event", gtk.main_quit)
-
- # Create the data models
- self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
- self.pkg_model.set_sort_column_id(COL_PKG_NAME, gtk.SORT_ASCENDING)
- self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
- self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, gtk.SORT_ASCENDING)
-
- pane = gtk.HPaned()
- pane.set_position(250)
- self.add(pane)
-
- # The master list of packages
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
-
- self.pkg_treeview = gtk.TreeView(self.pkg_model)
- self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
- column = gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME)
- self.pkg_treeview.append_column(column)
- pane.add1(scrolled)
- scrolled.add(self.pkg_treeview)
-
- box = gtk.VBox(homogeneous=True, spacing=4)
-
- # Runtime Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
- self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- scrolled.add(self.rdep_treeview)
- box.add(scrolled)
-
- # Build Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
- self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- scrolled.add(self.dep_treeview)
- box.add(scrolled)
- pane.add2(box)
-
- # Reverse Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
- self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
- scrolled.add(self.revdep_treeview)
- box.add(scrolled)
- pane.add2(box)
-
- self.show_all()
-
- def on_package_activated(self, treeview, path, column, data_col):
- model = treeview.get_model()
- package = model.get_value(model.get_iter(path), data_col)
-
- pkg_path = []
- def finder(model, path, iter, needle):
- package = model.get_value(iter, COL_PKG_NAME)
- if package == needle:
- pkg_path.append(path)
- return True
- else:
- return False
- self.pkg_model.foreach(finder, package)
- if pkg_path:
- self.pkg_treeview.get_selection().select_path(pkg_path[0])
- self.pkg_treeview.scroll_to_cell(pkg_path[0])
-
- def on_cursor_changed(self, selection):
- (model, it) = selection.get_selected()
-        if it is None:
- current_package = None
- else:
- current_package = model.get_value(it, COL_PKG_NAME)
- self.rdep_treeview.set_current_package(current_package)
- self.dep_treeview.set_current_package(current_package)
- self.revdep_treeview.set_current_package(current_package)
-
-
-def parse(depgraph, pkg_model, depends_model):
- for package in depgraph["pn"]:
- pkg_model.set(pkg_model.append(), COL_PKG_NAME, package)
-
- for package in depgraph["depends"]:
- for depend in depgraph["depends"][package]:
- depends_model.set (depends_model.append(),
- COL_DEP_TYPE, TYPE_DEP,
- COL_DEP_PARENT, package,
- COL_DEP_PACKAGE, depend)
-
- for package in depgraph["rdepends-pn"]:
- for rdepend in depgraph["rdepends-pn"][package]:
- depends_model.set (depends_model.append(),
- COL_DEP_TYPE, TYPE_RDEP,
- COL_DEP_PARENT, package,
- COL_DEP_PACKAGE, rdepend)
-
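parse() expects the dictionary shape carried by BitBake's dependency-tree event; a minimal hand-built example (package names invented):

```python
depgraph = {
    "pn": {"busybox": {}, "glibc": {}},
    "depends": {"busybox": ["glibc"]},
    "rdepends-pn": {"busybox": ["glibc"]},
}

pkg_model = gtk.ListStore(gobject.TYPE_STRING)
depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING,
                              gobject.TYPE_STRING)
parse(depgraph, pkg_model, depends_model)
print(pkg_model.iter_n_children(None))      # 2 packages
print(depends_model.iter_n_children(None))  # 2 edges: one DEP, one RDEP
```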
-
-class gtkthread(threading.Thread):
- quit = threading.Event()
- def __init__(self, shutdown):
- threading.Thread.__init__(self)
- self.setDaemon(True)
- self.shutdown = shutdown
-
- def run(self):
- gobject.threads_init()
- gtk.gdk.threads_init()
- gtk.main()
- gtkthread.quit.set()
-
-
-def main(server, eventHandler):
- try:
- cmdline = server.runCommand(["getCmdLineAction"])
- if not cmdline or cmdline[0] != "generateDotGraph":
- print("This UI is only compatible with the -g option")
- return
- ret = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
- if ret != True:
- print("Couldn't run command! %s" % ret)
- return
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return
-
- shutdown = 0
-
- gtkgui = gtkthread(shutdown)
- gtkgui.start()
-
- gtk.gdk.threads_enter()
- dep = DepExplorer()
- pbar = ProgressBar(dep)
- pbar.connect("delete-event", gtk.main_quit)
- gtk.gdk.threads_leave()
-
- progress_total = 0
- while True:
- try:
- event = eventHandler.waitEvent(0.25)
- if gtkthread.quit.isSet():
- server.runCommand(["stateStop"])
- break
-
- if event is None:
- continue
-
- if isinstance(event, bb.event.CacheLoadStarted):
- progress_total = event.total
- gtk.gdk.threads_enter()
- pbar.set_title("Loading Cache")
- pbar.update(0, progress_total)
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.event.CacheLoadProgress):
- x = event.current
- gtk.gdk.threads_enter()
- pbar.update(x, progress_total)
- gtk.gdk.threads_leave()
- continue
-
- if isinstance(event, bb.event.CacheLoadCompleted):
- gtk.gdk.threads_enter()
- pbar.update(progress_total, progress_total)
- gtk.gdk.threads_leave()
- continue
-
- if isinstance(event, bb.event.ParseStarted):
- progress_total = event.total
- gtk.gdk.threads_enter()
- pbar.set_title("Processing recipes")
- pbar.update(0, progress_total)
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.event.ParseProgress):
- x = event.current
- gtk.gdk.threads_enter()
- pbar.update(x, progress_total)
- gtk.gdk.threads_leave()
- continue
-
- if isinstance(event, bb.event.ParseCompleted):
- pbar.hide()
- continue
-
- if isinstance(event, bb.event.DepTreeGenerated):
- gtk.gdk.threads_enter()
- parse(event._depgraph, dep.pkg_model, dep.depends_model)
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.command.CommandCompleted):
- continue
-
- if isinstance(event, bb.command.CommandFailed):
- print("Command execution failed: %s" % event.error)
- return event.exitcode
-
- if isinstance(event, bb.command.CommandExit):
- return event.exitcode
-
- if isinstance(event, bb.cooker.CookerExit):
- break
-
- continue
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- except KeyboardInterrupt:
- if shutdown == 2:
- print("\nThird Keyboard Interrupt, exit.\n")
- break
- if shutdown == 1:
- print("\nSecond Keyboard Interrupt, stopping...\n")
- server.runCommand(["stateStop"])
- if shutdown == 0:
- print("\nKeyboard Interrupt, closing down...\n")
- server.runCommand(["stateShutdown"])
- shutdown = shutdown + 1
- pass
diff --git a/bitbake/lib/bb/ui/goggle.py b/bitbake/lib/bb/ui/goggle.py
deleted file mode 100644
index ec5a38dd4d..0000000000
--- a/bitbake/lib/bb/ui/goggle.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gobject
-import gtk
-import xmlrpclib
-from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
-from bb.ui.crumbs.progress import ProgressBar
-
-import Queue
-
-
-def event_handle_idle_func (eventHandler, build, pbar):
-
- # Consume as many messages as we can in the time available to us
- event = eventHandler.getEvent()
- while event:
- build.handle_event (event, pbar)
- event = eventHandler.getEvent()
-
- return True
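-# (Returning True keeps a gobject timeout/idle callback scheduled, so the
-# handler above runs repeatedly until the main loop exits.)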
-
-def scroll_tv_cb (model, path, iter, view):
- view.scroll_to_cell (path)
-
-
-# @todo hook these into the GUI so the user has feedback...
-def running_build_failed_cb (running_build):
- pass
-
-
-def running_build_succeeded_cb (running_build):
- pass
-
-
-class MainWindow (gtk.Window):
- def __init__ (self):
- gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
-
- # Setup tree view and the scrolled window
- scrolled_window = gtk.ScrolledWindow ()
- self.add (scrolled_window)
- self.cur_build_tv = RunningBuildTreeView()
- self.connect("delete-event", gtk.main_quit)
- self.set_default_size(640, 480)
- scrolled_window.add (self.cur_build_tv)
-
-
-def main (server, eventHandler):
- gobject.threads_init()
- gtk.gdk.threads_init()
-
- window = MainWindow ()
- window.show_all ()
- pbar = ProgressBar(window)
- pbar.connect("delete-event", gtk.main_quit)
-
- # Create the object for the current build
- running_build = RunningBuild ()
- window.cur_build_tv.set_model (running_build.model)
- running_build.model.connect("row-inserted", scroll_tv_cb, window.cur_build_tv)
- running_build.connect ("build-succeeded", running_build_succeeded_cb)
- running_build.connect ("build-failed", running_build_failed_cb)
-
- try:
- cmdline = server.runCommand(["getCmdLineAction"])
- if not cmdline:
- return 1
- ret = server.runCommand(cmdline)
- if ret != True:
- print("Couldn't get default commandline! %s" % ret)
- return 1
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return 1
-
- # Use a timeout function for probing the event queue to find out if we
- # have a message waiting for us.
- gobject.timeout_add (100,
- event_handle_idle_func,
- eventHandler,
- running_build,
- pbar)
-
- try:
- gtk.main()
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- finally:
- server.runCommand(["stateStop"])
-
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
deleted file mode 100644
index 042dbe902c..0000000000
--- a/bitbake/lib/bb/ui/knotty.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#
-# BitBake (No)TTY UI Implementation
-#
-# Handling output to TTYs or files (no TTY)
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import division
-
-import os
-import sys
-import xmlrpclib
-import logging
-import progressbar
-import bb.msg
-from bb.ui import uihelper
-
-logger = logging.getLogger("BitBake")
-interactive = sys.stdout.isatty()
-
-class BBProgress(progressbar.ProgressBar):
- def __init__(self, msg, maxval):
- self.msg = msg
- widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
- progressbar.ETA()]
-
- progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets)
-
-class NonInteractiveProgress(object):
- fobj = sys.stdout
-
- def __init__(self, msg, maxval):
- self.msg = msg
- self.maxval = maxval
-
- def start(self):
- self.fobj.write("%s..." % self.msg)
- self.fobj.flush()
- return self
-
- def update(self, value):
- pass
-
- def finish(self):
- self.fobj.write("done.\n")
- self.fobj.flush()
-
-def new_progress(msg, maxval):
- if interactive:
- return BBProgress(msg, maxval)
- else:
- return NonInteractiveProgress(msg, maxval)
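-
-# Illustrative usage of the helper above:
-#   pbar = new_progress("Parsing recipes", 100).start()
-#   pbar.update(42)
-#   pbar.finish()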
-
-def main(server, eventHandler):
-
- # Get values of variables which control our output
- includelogs = server.runCommand(["getVariable", "BBINCLUDELOGS"])
- loglines = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
-
- helper = uihelper.BBUIHelper()
-
- console = logging.StreamHandler(sys.stdout)
- format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
- console.setFormatter(format)
- logger.addHandler(console)
-
- try:
- cmdline = server.runCommand(["getCmdLineAction"])
- if not cmdline:
- return 1
- ret = server.runCommand(cmdline)
- if ret != True:
- print("Couldn't get default commandline! %s" % ret)
- return 1
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return 1
-
-
- parseprogress = None
- cacheprogress = None
- shutdown = 0
- return_value = 0
- while True:
- try:
- event = eventHandler.waitEvent(0.25)
- if event is None:
- continue
- helper.eventHandler(event)
- if isinstance(event, bb.runqueue.runQueueExitWait):
- if not shutdown:
- shutdown = 1
- if shutdown and helper.needUpdate:
- activetasks, failedtasks = helper.getTasks()
- if activetasks:
- print("Waiting for %s active tasks to finish:" % len(activetasks))
- for tasknum, task in enumerate(activetasks):
- print("%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task))
-
- if isinstance(event, logging.LogRecord):
- if event.levelno >= format.ERROR:
- return_value = 1
- # For "normal" logging conditions, don't show note logs from tasks
- # but do show them if the user has changed the default log level to
- # include verbose/debug messages
- if logger.getEffectiveLevel() > format.VERBOSE:
- if event.taskpid != 0 and event.levelno <= format.NOTE:
- continue
- logger.handle(event)
- continue
-
- if isinstance(event, bb.build.TaskFailed):
- return_value = 1
- logfile = event.logfile
- if logfile and os.path.exists(logfile):
- print("ERROR: Logfile of failure stored in: %s" % logfile)
-                if includelogs:
- print("Log data follows:")
- f = open(logfile, "r")
- lines = []
- while True:
- l = f.readline()
- if l == '':
- break
- l = l.rstrip()
- if loglines:
- lines.append(' | %s' % l)
- if len(lines) > int(loglines):
- lines.pop(0)
- else:
- print('| %s' % l)
- f.close()
- if lines:
- for line in lines:
- print(line)
- if isinstance(event, bb.build.TaskBase):
- logger.info(event._message)
- continue
- if isinstance(event, bb.event.ParseStarted):
- parseprogress = new_progress("Parsing recipes", event.total).start()
- continue
- if isinstance(event, bb.event.ParseProgress):
- parseprogress.update(event.current)
- continue
- if isinstance(event, bb.event.ParseCompleted):
- parseprogress.finish()
- print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
- % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
- continue
-
- if isinstance(event, bb.event.CacheLoadStarted):
- cacheprogress = new_progress("Loading cache", event.total).start()
- continue
- if isinstance(event, bb.event.CacheLoadProgress):
- cacheprogress.update(event.current)
- continue
- if isinstance(event, bb.event.CacheLoadCompleted):
- cacheprogress.finish()
- print("Loaded %d entries from dependency cache." % event.num_entries)
- continue
-
- if isinstance(event, bb.command.CommandCompleted):
- break
- if isinstance(event, bb.command.CommandFailed):
- return_value = event.exitcode
- logger.error("Command execution failed: %s", event.error)
- break
- if isinstance(event, bb.command.CommandExit):
- return_value = event.exitcode
- continue
- if isinstance(event, bb.cooker.CookerExit):
- break
- if isinstance(event, bb.event.MultipleProviders):
- logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
- event._item,
- ", ".join(event._candidates))
- logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
- continue
- if isinstance(event, bb.event.NoProvider):
- if event._runtime:
- r = "R"
- else:
- r = ""
-
- if event._dependees:
- logger.error("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)", r, event._item, ", ".join(event._dependees), r)
- else:
- logger.error("Nothing %sPROVIDES '%s'", r, event._item)
- continue
-
- if isinstance(event, bb.runqueue.runQueueTaskStarted):
- if event.noexec:
- tasktype = 'noexec task'
- else:
- tasktype = 'task'
- logger.info("Running %s %s of %s (ID: %s, %s)",
- tasktype,
- event.stats.completed + event.stats.active +
- event.stats.failed + 1,
- event.stats.total, event.taskid, event.taskstring)
- continue
-
- if isinstance(event, bb.runqueue.runQueueTaskFailed):
- logger.error("Task %s (%s) failed with exit code '%s'",
- event.taskid, event.taskstring, event.exitcode)
- continue
-
- # ignore
- if isinstance(event, (bb.event.BuildBase,
- bb.event.StampUpdate,
- bb.event.ConfigParsed,
- bb.event.RecipeParsed,
- bb.runqueue.runQueueEvent,
- bb.runqueue.runQueueExitWait)):
- continue
-
- logger.error("Unknown event: %s", event)
-
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- except KeyboardInterrupt:
- if shutdown == 2:
- print("\nThird Keyboard Interrupt, exit.\n")
- break
- if shutdown == 1:
- print("\nSecond Keyboard Interrupt, stopping...\n")
- server.runCommand(["stateStop"])
- if shutdown == 0:
- print("\nKeyboard Interrupt, closing down...\n")
- server.runCommand(["stateShutdown"])
- shutdown = shutdown + 1
- pass
- return return_value
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
deleted file mode 100644
index 469f1b7309..0000000000
--- a/bitbake/lib/bb/ui/ncurses.py
+++ /dev/null
@@ -1,352 +0,0 @@
-#
-# BitBake Curses UI Implementation
-#
-# Implements an ncurses frontend for the BitBake utility.
-#
-# Copyright (C) 2006 Michael 'Mickey' Lauer
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
- We have the following windows:
-
-    1.) Main Window: Shows what we are ultimately building and how far we are. Includes a status bar.
-    2.) Thread Activity Window: Shows one status line for every concurrent bitbake thread.
-    3.) Command Line Window: Contains an interactive command line where you can interact with BitBake.
-
-    Basic window layout is like this:
-
- |---------------------------------------------------------|
- | <Main Window> | <Thread Activity Window> |
- | | 0: foo do_compile complete|
- | Building Gtk+-2.6.10 | 1: bar do_patch complete |
- | Status: 60% | ... |
- | | ... |
- | | ... |
- |---------------------------------------------------------|
- |<Command Line Window> |
- |>>> which virtual/kernel |
- |openzaurus-kernel |
- |>>> _ |
- |---------------------------------------------------------|
-
-"""
-
-
-from __future__ import division
-import logging
-import os, sys, curses, itertools, time
-import bb
-import xmlrpclib
-from bb import ui
-from bb.ui import uihelper
-
-parsespin = itertools.cycle('|/-\\')
-
-X = 0
-Y = 1
-WIDTH = 2
-HEIGHT = 3
-
-MAXSTATUSLENGTH = 32
-
-class NCursesUI:
- """
- NCurses UI Class
- """
- class Window:
- """Base Window Class"""
- def __init__( self, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
- self.win = curses.newwin( height, width, y, x )
- self.dimensions = ( x, y, width, height )
- """
- if curses.has_colors():
- color = 1
- curses.init_pair( color, fg, bg )
- self.win.bkgdset( ord(' '), curses.color_pair(color) )
- else:
- self.win.bkgdset( ord(' '), curses.A_BOLD )
- """
- self.erase()
- self.setScrolling()
- self.win.noutrefresh()
-
- def erase( self ):
- self.win.erase()
-
- def setScrolling( self, b = True ):
- self.win.scrollok( b )
- self.win.idlok( b )
-
- def setBoxed( self ):
- self.boxed = True
- self.win.box()
- self.win.noutrefresh()
-
- def setText( self, x, y, text, *args ):
- self.win.addstr( y, x, text, *args )
- self.win.noutrefresh()
-
- def appendText( self, text, *args ):
- self.win.addstr( text, *args )
- self.win.noutrefresh()
-
- def drawHline( self, y ):
- self.win.hline( y, 0, curses.ACS_HLINE, self.dimensions[WIDTH] )
- self.win.noutrefresh()
-
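-    # All drawing in Window uses noutrefresh(), which only marks the window
-    # for update; the physical screen is repainted once per loop pass by the
-    # curses.doupdate() call in main() below, avoiding flicker.
-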
- class DecoratedWindow( Window ):
- """Base class for windows with a box and a title bar"""
- def __init__( self, title, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
- NCursesUI.Window.__init__( self, x+1, y+3, width-2, height-4, fg, bg )
- self.decoration = NCursesUI.Window( x, y, width, height, fg, bg )
- self.decoration.setBoxed()
- self.decoration.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
- self.setTitle( title )
-
- def setTitle( self, title ):
- self.decoration.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-
- #-------------------------------------------------------------------------#
-# class TitleWindow( Window ):
- #-------------------------------------------------------------------------#
-# """Title Window"""
-# def __init__( self, x, y, width, height ):
-# NCursesUI.Window.__init__( self, x, y, width, height )
-# version = bb.__version__
-# title = "BitBake %s" % version
-# credit = "(C) 2003-2007 Team BitBake"
-# #self.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
-# self.win.border()
-# self.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-# self.setText( 1, 2, credit.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-
- #-------------------------------------------------------------------------#
- class ThreadActivityWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Thread Activity Window"""
- def __init__( self, x, y, width, height ):
- NCursesUI.DecoratedWindow.__init__( self, "Thread Activity", x, y, width, height )
-
- def setStatus( self, thread, text ):
- line = "%02d: %s" % ( thread, text )
- width = self.dimensions[WIDTH]
- if ( len(line) > width ):
- line = line[:width-3] + "..."
- else:
- line = line.ljust( width )
- self.setText( 0, thread, line )
-
- #-------------------------------------------------------------------------#
- class MainWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Main Window"""
- def __init__( self, x, y, width, height ):
- self.StatusPosition = width - MAXSTATUSLENGTH
- NCursesUI.DecoratedWindow.__init__( self, None, x, y, width, height )
- curses.nl()
-
- def setTitle( self, title ):
- title = "BitBake %s" % bb.__version__
- self.decoration.setText( 2, 1, title, curses.A_BOLD )
- self.decoration.setText( self.StatusPosition - 8, 1, "Status:", curses.A_BOLD )
-
- def setStatus(self, status):
- while len(status) < MAXSTATUSLENGTH:
- status = status + " "
- self.decoration.setText( self.StatusPosition, 1, status, curses.A_BOLD )
-
-
- #-------------------------------------------------------------------------#
- class ShellOutputWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Interactive Command Line Output"""
- def __init__( self, x, y, width, height ):
- NCursesUI.DecoratedWindow.__init__( self, "Command Line Window", x, y, width, height )
-
- #-------------------------------------------------------------------------#
- class ShellInputWindow( Window ):
- #-------------------------------------------------------------------------#
- """Interactive Command Line Input"""
- def __init__( self, x, y, width, height ):
- NCursesUI.Window.__init__( self, x, y, width, height )
-
-# TODO: move this import back to the top of the file:
-#   from curses.textpad import Textbox
-# self.textbox = Textbox( self.win )
-# t = threading.Thread()
-# t.run = self.textbox.edit
-# t.start()
-
- #-------------------------------------------------------------------------#
- def main(self, stdscr, server, eventHandler):
- #-------------------------------------------------------------------------#
- height, width = stdscr.getmaxyx()
-
- # for now split it like that:
- # MAIN_y + THREAD_y = 2/3 screen at the top
- # MAIN_x = 2/3 left, THREAD_y = 1/3 right
- # CLI_y = 1/3 of screen at the bottom
- # CLI_x = full
-
- main_left = 0
- main_top = 0
- main_height = ( height // 3 * 2 )
- main_width = ( width // 3 ) * 2
- clo_left = main_left
- clo_top = main_top + main_height
- clo_height = height - main_height - main_top - 1
- clo_width = width
- cli_left = main_left
- cli_top = clo_top + clo_height
- cli_height = 1
- cli_width = width
- thread_left = main_left + main_width
- thread_top = main_top
- thread_height = main_height
- thread_width = width - main_width
-
- #tw = self.TitleWindow( 0, 0, width, main_top )
- mw = self.MainWindow( main_left, main_top, main_width, main_height )
- taw = self.ThreadActivityWindow( thread_left, thread_top, thread_width, thread_height )
- clo = self.ShellOutputWindow( clo_left, clo_top, clo_width, clo_height )
- cli = self.ShellInputWindow( cli_left, cli_top, cli_width, cli_height )
- cli.setText( 0, 0, "BB>" )
-
- mw.setStatus("Idle")
-
- helper = uihelper.BBUIHelper()
- shutdown = 0
-
- try:
- cmdline = server.runCommand(["getCmdLineAction"])
- if not cmdline:
- return
- ret = server.runCommand(cmdline)
- if ret != True:
- print("Couldn't get default commandlind! %s" % ret)
- return
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return
-
- exitflag = False
- while not exitflag:
- try:
- event = eventHandler.waitEvent(0.25)
- if not event:
- continue
-
- helper.eventHandler(event)
- if isinstance(event, bb.build.TaskBase):
- mw.appendText("NOTE: %s\n" % event._message)
- if isinstance(event, logging.LogRecord):
- mw.appendText(logging.getLevelName(event.levelno) + ': ' + event.getMessage() + '\n')
-
- if isinstance(event, bb.event.CacheLoadStarted):
- self.parse_total = event.total
- if isinstance(event, bb.event.CacheLoadProgress):
- x = event.current
- y = self.parse_total
- mw.setStatus("Loading Cache: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
- if isinstance(event, bb.event.CacheLoadCompleted):
- mw.setStatus("Idle")
- mw.appendText("Loaded %d entries from dependency cache.\n"
- % ( event.num_entries))
-
- if isinstance(event, bb.event.ParseStarted):
- self.parse_total = event.total
- if isinstance(event, bb.event.ParseProgress):
- x = event.current
- y = self.parse_total
- mw.setStatus("Parsing Recipes: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
- if isinstance(event, bb.event.ParseCompleted):
- mw.setStatus("Idle")
- mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked.\n"
- % ( event.cached, event.parsed, event.skipped, event.masked ))
-
-# if isinstance(event, bb.build.TaskFailed):
-# if event.logfile:
-# if data.getVar("BBINCLUDELOGS", d):
-# bb.msg.error(bb.msg.domain.Build, "log data follows (%s)" % logfile)
-# number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
-# if number_of_lines:
-# os.system('tail -n%s %s' % (number_of_lines, logfile))
-# else:
-# f = open(logfile, "r")
-# while True:
-# l = f.readline()
-# if l == '':
-# break
-# l = l.rstrip()
-# print '| %s' % l
-# f.close()
-# else:
-# bb.msg.error(bb.msg.domain.Build, "see log in %s" % logfile)
-
- if isinstance(event, bb.command.CommandCompleted):
- # stop so the user can see the result of the build, but
- # also allow them to now exit with a single ^C
- shutdown = 2
- if isinstance(event, bb.command.CommandFailed):
- mw.appendText("Command execution failed: %s" % event.error)
- time.sleep(2)
- exitflag = True
- if isinstance(event, bb.command.CommandExit):
- exitflag = True
- if isinstance(event, bb.cooker.CookerExit):
- exitflag = True
-
- if helper.needUpdate:
- activetasks, failedtasks = helper.getTasks()
- taw.erase()
- taw.setText(0, 0, "")
- if activetasks:
- taw.appendText("Active Tasks:\n")
- for task in activetasks.itervalues():
- taw.appendText(task["title"] + '\n')
- if failedtasks:
- taw.appendText("Failed Tasks:\n")
- for task in failedtasks:
- taw.appendText(task["title"] + '\n')
-
- curses.doupdate()
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
-
- except KeyboardInterrupt:
- if shutdown == 2:
- mw.appendText("Third Keyboard Interrupt, exit.\n")
- exitflag = True
- if shutdown == 1:
- mw.appendText("Second Keyboard Interrupt, stopping...\n")
- server.runCommand(["stateStop"])
- if shutdown == 0:
- mw.appendText("Keyboard Interrupt, closing down...\n")
- server.runCommand(["stateShutdown"])
- shutdown = shutdown + 1
- pass
-
-def main(server, eventHandler):
- if not os.isatty(sys.stdout.fileno()):
- print("FATAL: Unable to run 'ncurses' UI without a TTY.")
- return
- ui = NCursesUI()
- try:
- curses.wrapper(ui.main, server, eventHandler)
- except:
- import traceback
- traceback.print_exc()
diff --git a/bitbake/lib/bb/ui/puccho.py b/bitbake/lib/bb/ui/puccho.py
deleted file mode 100644
index 3ce4590c16..0000000000
--- a/bitbake/lib/bb/ui/puccho.py
+++ /dev/null
@@ -1,425 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import gtk.glade
-import threading
-import urllib2
-import os
-import contextlib
-
-from bb.ui.crumbs.buildmanager import BuildManager, BuildConfiguration
-from bb.ui.crumbs.buildmanager import BuildManagerTreeView
-
-from bb.ui.crumbs.runningbuild import RunningBuild, RunningBuildTreeView
-
-# The metadata loader is used by the BuildSetupDialog to download the
-# available options to populate the dialog
-class MetaDataLoader(gobject.GObject):
- """ This class provides the mechanism for loading the metadata (the
- fetching and parsing) from a given URL. The metadata encompasses details
-    on what machines are available, the distributions and images available
-    for each machine, and the URIs to use for building a given machine."""
- __gsignals__ = {
- 'success' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'error' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_STRING,))
- }
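-
-    # Consumers hook these signals with the usual gobject connect() API, e.g.
-    #   loader.connect("success", success_cb)
-    #   loader.connect("error", error_cb)
-    # (as BuildSetupDialog.__init__ does below).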
-
- # We use these little helper functions to ensure that we take the gdk lock
- # when emitting the signal. These functions are called as idles (so that
-    # they happen in the gtk / main thread's main loop).
- def emit_error_signal (self, remark):
- gtk.gdk.threads_enter()
- self.emit ("error", remark)
- gtk.gdk.threads_leave()
-
- def emit_success_signal (self):
- gtk.gdk.threads_enter()
- self.emit ("success")
- gtk.gdk.threads_leave()
-
- def __init__ (self):
- gobject.GObject.__init__ (self)
-
- class LoaderThread(threading.Thread):
- """ This class provides an asynchronous loader for the metadata (by
- using threads and signals). This is useful since the metadata may be
- at a remote URL."""
- class LoaderImportException (Exception):
- pass
-
- def __init__(self, loader, url):
- threading.Thread.__init__ (self)
- self.url = url
- self.loader = loader
-
- def run (self):
- result = {}
- try:
- with contextlib.closing (urllib2.urlopen (self.url)) as f:
- # Parse the metadata format. The format is....
- # <machine>;<default distro>|<distro>...;<default image>|<image>...;<type##url>|...
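-                    # An illustrative (made-up) example line:
-                    #   "qemuarm;poky|poky-bleeding;core-image-minimal|core-image-sato;nightly##http://example.com/qemuarm"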
- for line in f:
- components = line.split(";")
- if (len (components) < 4):
- raise MetaDataLoader.LoaderThread.LoaderImportException
- machine = components[0]
- distros = components[1].split("|")
- images = components[2].split("|")
- urls = components[3].split("|")
-
- result[machine] = (distros, images, urls)
-
- # Create an object representing this *potential*
- # configuration. It can become concrete if the machine, distro
- # and image are all chosen in the UI
- configuration = BuildConfiguration()
- configuration.metadata_url = self.url
- configuration.machine_options = result
- self.loader.configuration = configuration
-
- # Emit that we've actually got a configuration
- gobject.idle_add (MetaDataLoader.emit_success_signal,
- self.loader)
-
- except MetaDataLoader.LoaderThread.LoaderImportException as e:
- gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
- "Repository metadata corrupt")
- except Exception as e:
- gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
- "Unable to download repository metadata")
- print(e)
-
- def try_fetch_from_url (self, url):
- # Try and download the metadata. Firing a signal if successful
- thread = MetaDataLoader.LoaderThread(self, url)
- thread.start()
-
-class BuildSetupDialog (gtk.Dialog):
- RESPONSE_BUILD = 1
-
- # A little helper method that just sets the states on the widgets based on
- # whether we've got good metadata or not.
- def set_configurable (self, configurable):
- if (self.configurable == configurable):
- return
-
- self.configurable = configurable
- for widget in self.conf_widgets:
- widget.set_sensitive (configurable)
-
- if not configurable:
- self.machine_combo.set_active (-1)
- self.distribution_combo.set_active (-1)
- self.image_combo.set_active (-1)
-
- # GTK widget callbacks
- def refresh_button_clicked (self, button):
- # Refresh button clicked.
-
- url = self.location_entry.get_chars (0, -1)
- self.loader.try_fetch_from_url(url)
-
- def repository_entry_editable_changed (self, entry):
- if (len (entry.get_chars (0, -1)) > 0):
- self.refresh_button.set_sensitive (True)
- else:
- self.refresh_button.set_sensitive (False)
- self.clear_status_message()
-
- # If we were previously configurable we are no longer since the
- # location entry has been changed
- self.set_configurable (False)
-
- def machine_combo_changed (self, combobox):
- active_iter = combobox.get_active_iter()
-
- if not active_iter:
- return
-
- model = combobox.get_model()
-
- if model:
- chosen_machine = model.get (active_iter, 0)[0]
-
- (distros_model, images_model) = \
- self.loader.configuration.get_distro_and_images_models (chosen_machine)
-
- self.distribution_combo.set_model (distros_model)
- self.image_combo.set_model (images_model)
-
- # Callbacks from the loader
- def loader_success_cb (self, loader):
- self.status_image.set_from_icon_name ("info",
- gtk.ICON_SIZE_BUTTON)
- self.status_image.show()
- self.status_label.set_label ("Repository metadata successfully downloaded")
-
- # Set the models on the combo boxes based on the models generated from
- # the configuration that the loader has created
-
- # We just need to set the machine here, that then determines the
- # distro and image options. Cunning huh? :-)
-
- self.configuration = self.loader.configuration
- model = self.configuration.get_machines_model ()
- self.machine_combo.set_model (model)
-
- self.set_configurable (True)
-
- def loader_error_cb (self, loader, message):
- self.status_image.set_from_icon_name ("error",
- gtk.ICON_SIZE_BUTTON)
- self.status_image.show()
- self.status_label.set_text ("Error downloading repository metadata")
- for widget in self.conf_widgets:
- widget.set_sensitive (False)
-
- def clear_status_message (self):
- self.status_image.hide()
- self.status_label.set_label (
- """<i>Enter the repository location and press _Refresh</i>""")
-
- def __init__ (self):
- gtk.Dialog.__init__ (self)
-
- # Cancel
- self.add_button (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
-
- # Build
- button = gtk.Button ("_Build", None, True)
- image = gtk.Image ()
- image.set_from_stock (gtk.STOCK_EXECUTE, gtk.ICON_SIZE_BUTTON)
- button.set_image (image)
- self.add_action_widget (button, BuildSetupDialog.RESPONSE_BUILD)
- button.show_all ()
-
- # Pull in *just* the table from the Glade XML data.
- gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
- root = "build_table")
- table = gxml.get_widget ("build_table")
- self.vbox.pack_start (table, True, False, 0)
-
- # Grab all the widgets that we need to turn on/off when we refresh...
- self.conf_widgets = []
- self.conf_widgets += [gxml.get_widget ("machine_label")]
- self.conf_widgets += [gxml.get_widget ("distribution_label")]
- self.conf_widgets += [gxml.get_widget ("image_label")]
- self.conf_widgets += [gxml.get_widget ("machine_combo")]
- self.conf_widgets += [gxml.get_widget ("distribution_combo")]
- self.conf_widgets += [gxml.get_widget ("image_combo")]
-
- # Grab the status widgets
- self.status_image = gxml.get_widget ("status_image")
- self.status_label = gxml.get_widget ("status_label")
-
- # Grab the refresh button and connect to the clicked signal
- self.refresh_button = gxml.get_widget ("refresh_button")
- self.refresh_button.connect ("clicked", self.refresh_button_clicked)
-
- # Grab the location entry and connect to editable::changed
- self.location_entry = gxml.get_widget ("location_entry")
- self.location_entry.connect ("changed",
- self.repository_entry_editable_changed)
-
- # Grab the machine combo and hook onto the changed signal. This then
- # allows us to populate the distro and image combos
- self.machine_combo = gxml.get_widget ("machine_combo")
- self.machine_combo.connect ("changed", self.machine_combo_changed)
-
- # Setup the combo
- cell = gtk.CellRendererText()
- self.machine_combo.pack_start(cell, True)
- self.machine_combo.add_attribute(cell, 'text', 0)
-
- # Grab the distro and image combos. We need these to populate with
- # models once the machine is chosen
- self.distribution_combo = gxml.get_widget ("distribution_combo")
- cell = gtk.CellRendererText()
- self.distribution_combo.pack_start(cell, True)
- self.distribution_combo.add_attribute(cell, 'text', 0)
-
- self.image_combo = gxml.get_widget ("image_combo")
- cell = gtk.CellRendererText()
- self.image_combo.pack_start(cell, True)
- self.image_combo.add_attribute(cell, 'text', 0)
-
- # Put the default descriptive text in the status box
- self.clear_status_message()
-
-        # Mark as non-configurable; this just greys out the widgets the
- # user can't yet use
- self.configurable = False
- self.set_configurable(False)
-
- # Show the table
- table.show_all ()
-
- # The loader and some signals connected to it to update the status
- # area
- self.loader = MetaDataLoader()
- self.loader.connect ("success", self.loader_success_cb)
- self.loader.connect ("error", self.loader_error_cb)
-
- def update_configuration (self):
- """ A poorly named function but it updates the internal configuration
-        from the widgets. This makes that configuration concrete so that it
-        can be used for building """
- # Extract the chosen machine from the combo
- model = self.machine_combo.get_model()
- active_iter = self.machine_combo.get_active_iter()
- if (active_iter):
- self.configuration.machine = model.get(active_iter, 0)[0]
-
- # Extract the chosen distro from the combo
- model = self.distribution_combo.get_model()
- active_iter = self.distribution_combo.get_active_iter()
- if (active_iter):
- self.configuration.distro = model.get(active_iter, 0)[0]
-
- # Extract the chosen image from the combo
- model = self.image_combo.get_model()
- active_iter = self.image_combo.get_active_iter()
- if (active_iter):
- self.configuration.image = model.get(active_iter, 0)[0]
-
-# This function pulls events out of the event queue and pushes them into the
-# RunningBuild, which in turn updates the progress tree view.
-#
-# TODO: Should be a method on the RunningBuild class
-def event_handle_timeout (eventHandler, build):
- # Consume as many messages as we can ...
- event = eventHandler.getEvent()
- while event:
- build.handle_event (event)
- event = eventHandler.getEvent()
- return True
-
-class MainWindow (gtk.Window):
-
- # Callback that gets fired when the user hits a button in the
- # BuildSetupDialog.
- def build_dialog_box_response_cb (self, dialog, response_id):
- conf = None
- if (response_id == BuildSetupDialog.RESPONSE_BUILD):
- dialog.update_configuration()
- print(dialog.configuration.machine, dialog.configuration.distro, \
- dialog.configuration.image)
- conf = dialog.configuration
-
- dialog.destroy()
-
- if conf:
- self.manager.do_build (conf)
-
- def build_button_clicked_cb (self, button):
- dialog = BuildSetupDialog ()
-
- # For some unknown reason Dialog.run causes nice little deadlocks ... :-(
- dialog.connect ("response", self.build_dialog_box_response_cb)
- dialog.show()
-
- def __init__ (self):
- gtk.Window.__init__ (self)
-
- # Pull in *just* the main vbox from the Glade XML data and then pack
- # that inside the window
- gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
- root = "main_window_vbox")
- vbox = gxml.get_widget ("main_window_vbox")
- self.add (vbox)
-
- # Create the tree views for the build manager view and the progress view
- self.build_manager_view = BuildManagerTreeView()
- self.running_build_view = RunningBuildTreeView()
-
- # Grab the scrolled windows that we put the tree views into
- self.results_scrolledwindow = gxml.get_widget ("results_scrolledwindow")
- self.progress_scrolledwindow = gxml.get_widget ("progress_scrolledwindow")
-
- # Put the tree views inside ...
- self.results_scrolledwindow.add (self.build_manager_view)
- self.progress_scrolledwindow.add (self.running_build_view)
-
- # Hook up the build button...
- self.build_button = gxml.get_widget ("main_toolbutton_build")
- self.build_button.connect ("clicked", self.build_button_clicked_cb)
-
-# I'm not very happy about the current ownership of the RunningBuild. I have
-# my suspicions that this object should be held by the BuildManager since we
-# care about the signals in the manager
-
-def running_build_succeeded_cb (running_build, manager):
- # Notify the manager that a build has succeeded. This is necessary as part
- # of the 'hack' that we use for making the row in the model / view
- # representing the ongoing build change into a row representing the
-    # completed build. Since we know only one build can be running at a time,
-    # we can handle this.
-
- # FIXME: Refactor all this so that the RunningBuild is owned by the
- # BuildManager. It can then hook onto the signals directly and drive
- # interesting things it cares about.
- manager.notify_build_succeeded ()
- print("build succeeded")
-
-def running_build_failed_cb (running_build, manager):
- # As above
- print("build failed")
- manager.notify_build_failed ()
-
-def main (server, eventHandler):
- # Initialise threading...
- gobject.threads_init()
- gtk.gdk.threads_init()
-
- main_window = MainWindow ()
- main_window.show_all ()
-
- # Set up the build manager stuff in general
- builds_dir = os.path.join (os.getcwd(), "results")
- manager = BuildManager (server, builds_dir)
- main_window.build_manager_view.set_model (manager.model)
-
- # Do the running build setup
- running_build = RunningBuild ()
- main_window.running_build_view.set_model (running_build.model)
- running_build.connect ("build-succeeded", running_build_succeeded_cb,
- manager)
- running_build.connect ("build-failed", running_build_failed_cb, manager)
-
- # We need to save the manager into the MainWindow so that the toolbar
- # button can use it.
- # FIXME: Refactor ?
- main_window.manager = manager
-
- # Use a timeout function for probing the event queue to find out if we
- # have a message waiting for us.
- gobject.timeout_add (200,
- event_handle_timeout,
- eventHandler,
- running_build)
-
- gtk.main()
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
deleted file mode 100644
index 2fef4e4659..0000000000
--- a/bitbake/lib/bb/ui/uievent.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-"""
-Use this class to fork off a thread to receive event callbacks from the bitbake
-server and queue them for the UI to process. This approach must be used to avoid
-client/server deadlocks.
-"""
-
-import socket, threading, pickle
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-
-class BBUIEventQueue:
- def __init__(self, BBServer):
-
- self.eventQueue = []
- self.eventQueueLock = threading.Lock()
- self.eventQueueNotify = threading.Event()
-
- self.BBServer = BBServer
-
- self.t = threading.Thread()
- self.t.setDaemon(True)
- self.t.run = self.startCallbackHandler
- self.t.start()
-
- def getEvent(self):
-
- self.eventQueueLock.acquire()
-
- if len(self.eventQueue) == 0:
- self.eventQueueLock.release()
- return None
-
- item = self.eventQueue.pop(0)
-
- if len(self.eventQueue) == 0:
- self.eventQueueNotify.clear()
-
- self.eventQueueLock.release()
- return item
-
- def waitEvent(self, delay):
- self.eventQueueNotify.wait(delay)
- return self.getEvent()
-
- def queue_event(self, event):
- self.eventQueueLock.acquire()
- self.eventQueue.append(event)
- self.eventQueueNotify.set()
- self.eventQueueLock.release()
-
- def send_event(self, event):
- self.queue_event(pickle.loads(event))
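-
-    # send_event() is the XML-RPC entry point (registered as "event.send" in
-    # startCallbackHandler below): the server ships events as pickled
-    # strings, which are unpickled here and queued for the UI to collect.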
-
- def startCallbackHandler(self):
-
- server = UIXMLRPCServer()
- self.host, self.port = server.socket.getsockname()
-
- server.register_function( self.system_quit, "event.quit" )
- server.register_function( self.send_event, "event.send" )
- server.socket.settimeout(1)
-
- self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)
-
- self.server = server
- while not server.quit:
- server.handle_request()
- server.server_close()
-
- def system_quit( self ):
- """
- Shut down the callback thread
- """
- try:
- self.BBServer.unregisterEventHandler(self.EventHandle)
- except:
- pass
- self.server.quit = True
-
-class UIXMLRPCServer (SimpleXMLRPCServer):
-
- def __init__( self, interface = ("localhost", 0) ):
- self.quit = False
- SimpleXMLRPCServer.__init__( self,
- interface,
- requestHandler=SimpleXMLRPCRequestHandler,
- logRequests=False, allow_none=True)
-
- def get_request(self):
- while not self.quit:
- try:
- sock, addr = self.socket.accept()
- sock.settimeout(1)
- return (sock, addr)
- except socket.timeout:
- pass
- return (None, None)
-
- def close_request(self, request):
- if request is None:
- return
- SimpleXMLRPCServer.close_request(self, request)
-
- def process_request(self, request, client_address):
- if request is None:
- return
- SimpleXMLRPCServer.process_request(self, request, client_address)
-
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
deleted file mode 100644
index 617d60db82..0000000000
--- a/bitbake/lib/bb/ui/uihelper.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import bb.build
-
-class BBUIHelper:
- def __init__(self):
- self.needUpdate = False
- self.running_tasks = {}
- self.failed_tasks = []
-
- def eventHandler(self, event):
- if isinstance(event, bb.build.TaskStarted):
- self.running_tasks[event.pid] = { 'title' : "%s %s" % (event._package, event._task) }
- self.needUpdate = True
- if isinstance(event, bb.build.TaskSucceeded):
- del self.running_tasks[event.pid]
- self.needUpdate = True
- if isinstance(event, bb.build.TaskFailed):
- del self.running_tasks[event.pid]
- self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
- self.needUpdate = True
-
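-    # Typical UI usage (see knotty.py and ncurses.py above): feed every event
-    # through eventHandler(), then poll needUpdate and call getTasks() to
-    # refresh the task display.
-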
- def getTasks(self):
- self.needUpdate = False
- return (self.running_tasks, self.failed_tasks)
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
deleted file mode 100644
index b2f8bb6f89..0000000000
--- a/bitbake/lib/bb/utils.py
+++ /dev/null
@@ -1,845 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Utility Functions
-"""
-
-# Copyright (C) 2004 Michael Lauer
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, fcntl, os, string, stat, shutil, time
-import sys
-import errno
-import logging
-import bb
-import bb.msg
-from commands import getstatusoutput
-from contextlib import contextmanager
-
-logger = logging.getLogger("BitBake.Util")
-
-# Version comparison
-separators = ".-"
-
-# Context used in better_exec, eval
-_context = {
- "os": os,
- "bb": bb,
- "time": time,
-}
-
-def explode_version(s):
- r = []
- alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
- numeric_regexp = re.compile('^(\d+)(.*)$')
- while (s != ''):
- if s[0] in string.digits:
- m = numeric_regexp.match(s)
- r.append(int(m.group(1)))
- s = m.group(2)
- continue
- if s[0] in string.letters:
- m = alpha_regexp.match(s)
- r.append(m.group(1))
- s = m.group(2)
- continue
- r.append(s[0])
- s = s[1:]
- return r
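-
-# For example, explode_version("1.0_alpha2") returns
-# [1, '.', 0, '_', 'alpha', 2]: digit runs become ints, letter runs stay
-# strings, and every other character is kept as-is.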
-
-def vercmp_part(a, b):
- va = explode_version(a)
- vb = explode_version(b)
- sa = False
- sb = False
- while True:
- if va == []:
- ca = None
- else:
- ca = va.pop(0)
- if vb == []:
- cb = None
- else:
- cb = vb.pop(0)
- if ca == None and cb == None:
- return 0
-
- if isinstance(ca, basestring):
- sa = ca in separators
- if isinstance(cb, basestring):
- sb = cb in separators
- if sa and not sb:
- return -1
- if not sa and sb:
- return 1
-
- if ca > cb:
- return 1
- if ca < cb:
- return -1
-
-def vercmp(ta, tb):
- (ea, va, ra) = ta
- (eb, vb, rb) = tb
-
- r = int(ea or 0) - int(eb or 0)
- if (r == 0):
- r = vercmp_part(va, vb)
- if (r == 0):
- r = vercmp_part(ra, rb)
- return r
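-
-# For example, with (epoch, version, revision) tuples,
-# vercmp(("0", "1.0", "r1"), ("0", "1.0", "r2")) is negative: the epochs and
-# versions tie, and "r1" sorts before "r2".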
-
-_package_weights_ = {"pre":-2, "p":0, "alpha":-4, "beta":-3, "rc":-1} # dicts are unordered
-_package_ends_ = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ] # so we need ordered list
-
-def relparse(myver):
- """Parses the last elements of a version number into a triplet, that can
- later be compared.
- """
-
- number = 0
- p1 = 0
- p2 = 0
- mynewver = myver.split('_')
- if len(mynewver) == 2:
-        # a version with a suffix listed in _package_ends_ (e.g. "1.0_rc1")
- number = float(mynewver[0])
- match = 0
- for x in _package_ends_:
- elen = len(x)
- if mynewver[1][:elen] == x:
- match = 1
- p1 = _package_weights_[x]
- try:
- p2 = float(mynewver[1][elen:])
- except:
- p2 = 0
- break
- if not match:
- # normal number or number with letter at end
- divider = len(myver)-1
- if myver[divider:] not in "1234567890":
- # letter at end
- p1 = ord(myver[divider:])
- number = float(myver[0:divider])
- else:
- number = float(myver)
- else:
- # normal number or number with letter at end
- divider = len(myver)-1
- if myver[divider:] not in "1234567890":
- #letter at end
- p1 = ord(myver[divider:])
- number = float(myver[0:divider])
- else:
- number = float(myver)
- return [number, p1, p2]
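-
-# For example, relparse("1.2_pre3") returns [1.2, -2, 3.0] ("pre" weighs -2
-# in _package_weights_), while relparse("1.2b") returns [1.2, 98, 0]
-# (ord('b') == 98).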
-
-__vercmp_cache__ = {}
-
-def vercmp_string(val1, val2):
- """This takes two version strings and returns an integer to tell you whether
- the versions are the same, val1>val2 or val2>val1.
- """
-
- # quick short-circuit
- if val1 == val2:
- return 0
- valkey = val1 + " " + val2
-
- # cache lookup
-    try:
-        return __vercmp_cache__[valkey]
-    except KeyError:
-        pass
-    try:
-        return - __vercmp_cache__[val2 + " " + val1]
-    except KeyError:
-        pass
-
- # consider 1_p2 vc 1.1
- # after expansion will become (1_p2,0) vc (1,1)
- # then 1_p2 is compared with 1 before 0 is compared with 1
- # to solve the bug we need to convert it to (1,0_p2)
- # by splitting _prepart part and adding it back _after_expansion
-
- val1_prepart = val2_prepart = ''
- if val1.count('_'):
- val1, val1_prepart = val1.split('_', 1)
- if val2.count('_'):
- val2, val2_prepart = val2.split('_', 1)
-
- # replace '-' by '.'
- # FIXME: Is it needed? can val1/2 contain '-'?
-
- val1 = val1.split("-")
- if len(val1) == 2:
- val1[0] = val1[0] + "." + val1[1]
- val2 = val2.split("-")
- if len(val2) == 2:
- val2[0] = val2[0] + "." + val2[1]
-
- val1 = val1[0].split('.')
- val2 = val2[0].split('.')
-
- # add back decimal point so that .03 does not become "3" !
- for x in xrange(1, len(val1)):
- if val1[x][0] == '0' :
- val1[x] = '.' + val1[x]
- for x in xrange(1, len(val2)):
- if val2[x][0] == '0' :
- val2[x] = '.' + val2[x]
-
-    # extend version numbers
- if len(val2) < len(val1):
- val2.extend(["0"]*(len(val1)-len(val2)))
- elif len(val1) < len(val2):
- val1.extend(["0"]*(len(val2)-len(val1)))
-
- # add back _prepart tails
- if val1_prepart:
- val1[-1] += '_' + val1_prepart
- if val2_prepart:
- val2[-1] += '_' + val2_prepart
- # The above code will extend version numbers out so they
- # have the same number of digits.
- for x in xrange(0, len(val1)):
- cmp1 = relparse(val1[x])
- cmp2 = relparse(val2[x])
- for y in xrange(0, 3):
- myret = cmp1[y] - cmp2[y]
- if myret != 0:
- __vercmp_cache__[valkey] = myret
- return myret
- __vercmp_cache__[valkey] = 0
- return 0
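-
-# For example, vercmp_string("1.0", "1.0.1") is negative, as is
-# vercmp_string("1.0_rc1", "1.0"): the "_rc" suffix carries a negative
-# weight, so release candidates sort before the final release.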
-
-def explode_deps(s):
- """
- Take an RDEPENDS style string of format:
- "DEPEND1 (optional version) DEPEND2 (optional version) ..."
- and return a list of dependencies.
- Version information is ignored.
- """
- r = []
- l = s.split()
- flag = False
- for i in l:
- if i[0] == '(':
- flag = True
- #j = []
- if not flag:
- r.append(i)
- #else:
- # j.append(i)
- if flag and i.endswith(')'):
- flag = False
- # Ignore version
- #r[-1] += ' ' + ' '.join(j)
- return r
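-
-# For example, explode_deps("glibc (>= 2.10) gcc make") returns
-# ['glibc', 'gcc', 'make'].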
-
-def explode_dep_versions(s):
- """
- Take an RDEPENDS style string of format:
- "DEPEND1 (optional version) DEPEND2 (optional version) ..."
- and return a dictionary of dependencies and versions.
- """
- r = {}
- l = s.replace(",", "").split()
- lastdep = None
- lastver = ""
- inversion = False
- for i in l:
- if i[0] == '(':
- inversion = True
- lastver = i[1:] or ""
- #j = []
- elif inversion and i.endswith(')'):
- inversion = False
- lastver = lastver + " " + (i[:-1] or "")
- r[lastdep] = lastver
- elif not inversion:
- r[i] = None
- lastdep = i
- lastver = ""
- elif inversion:
- lastver = lastver + " " + i
-
- return r
-
-def join_deps(deps):
- """
- Take the result from explode_dep_versions and generate a dependency string
- """
- result = []
- for dep in deps:
- if deps[dep]:
- result.append(dep + " (" + deps[dep] + ")")
- else:
- result.append(dep)
- return ", ".join(result)
-
-def _print_trace(body, line):
- """
- Print the Environment of a Text Body
- """
- # print the environment of the method
- min_line = max(1, line-4)
- max_line = min(line + 4, len(body))
- for i in xrange(min_line, max_line + 1):
- if line == i:
- logger.error(' *** %.4d:%s', i, body[i-1])
- else:
- logger.error(' %.4d:%s', i, body[i-1])
-
-def better_compile(text, file, realfile, mode = "exec"):
- """
- A better compile method. This method
- will print the offending lines.
- """
- try:
- return compile(text, file, mode)
- except Exception as e:
- # split the text into lines again
- body = text.split('\n')
- logger.error("Error in compiling python function in %s", realfile)
- logger.error(str(e))
- if e.lineno:
- logger.error("The lines leading to this error were:")
- logger.error("\t%d:%s:'%s'", e.lineno, e.__class__.__name__, body[e.lineno-1])
- _print_trace(body, e.lineno)
- else:
- logger.error("The function causing this error was:")
- for line in body:
- logger.error(line)
-
- raise
-
-def better_exec(code, context, text, realfile = "<code>"):
- """
-    Similar to better_compile, better_exec will
- print the lines that are responsible for the
- error.
- """
- import bb.parse
- if not hasattr(code, "co_filename"):
- code = better_compile(code, realfile, realfile)
- try:
- exec(code, _context, context)
- except Exception:
- (t, value, tb) = sys.exc_info()
-
- if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
- raise
-
- import traceback
- exception = traceback.format_exception_only(t, value)
- logger.error('Error executing a python function in %s:\n%s',
- realfile, ''.join(exception))
-
- # Strip 'us' from the stack (better_exec call)
- tb = tb.tb_next
-
- textarray = text.split('\n')
- linefailed = traceback.tb_lineno(tb)
-
- tbextract = traceback.extract_tb(tb)
- tbformat = "\n".join(traceback.format_list(tbextract))
- logger.error("The stack trace of python calls that resulted in this exception/failure was:")
- for line in tbformat.split('\n'):
- logger.error(line)
-
- logger.error("The code that was being executed was:")
- _print_trace(textarray, linefailed)
- logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[0][0], tbextract[0][1], tbextract[0][2])
-
- # See if this is a function we constructed and has calls back into other functions in
- # "text". If so, try and improve the context of the error by diving down the trace
- level = 0
- nexttb = tb.tb_next
- while nexttb is not None:
- if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
- _print_trace(textarray, tbextract[level+1][1])
- logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
- else:
- break
-        nexttb = nexttb.tb_next
- level = level + 1
-
- raise
-
-def simple_exec(code, context):
- exec(code, _context, context)
-
-def better_eval(source, locals):
- return eval(source, _context, locals)
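-
-# Both helpers above evaluate against the shared _context defined earlier, so
-# e.g. better_eval("time.time() > 0", {}) works without importing time in the
-# evaluated source.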
-
-@contextmanager
-def fileslocked(files):
- """Context manager for locking and unlocking file locks."""
- locks = []
- if files:
- for lockfile in files:
- locks.append(bb.utils.lockfile(lockfile))
-
-    try:
-        yield
-    finally:
-        for lock in locks:
-            bb.utils.unlockfile(lock)
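-
-# Illustrative usage:
-#   with fileslocked(["/tmp/foo.lock"]):
-#       ...  # critical section; all locks are released on exit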
-
-def lockfile(name, shared=False):
- """
- Use the file fn as a lock file, return when the lock has been acquired.
- Returns a variable to pass to unlockfile().
- """
- path = os.path.dirname(name)
- if not os.path.isdir(path):
- logger.error("Lockfile destination directory '%s' does not exist", path)
- sys.exit(1)
-
- if not os.access(path, os.W_OK):
- logger.error("Error, lockfile path is not writable!: %s" % path)
- sys.exit(1)
-
- op = fcntl.LOCK_EX
- if shared:
- op = fcntl.LOCK_SH
-
- while True:
- # If we leave the lockfiles lying around there is no problem
- # but we should clean up after ourselves. This gives potential
- # for races though. To work around this, when we acquire the lock
- # we check the file we locked was still the lock file on disk.
- # by comparing inode numbers. If they don't match or the lockfile
- # no longer exists, we start again.
-
- # This implementation is unfair since the last person to request the
- # lock is the most likely to win it.
-
- try:
- lf = open(name, 'a+')
- fileno = lf.fileno()
- fcntl.flock(fileno, op)
- statinfo = os.fstat(fileno)
- if os.path.exists(lf.name):
- statinfo2 = os.stat(lf.name)
- if statinfo.st_ino == statinfo2.st_ino:
- return lf
- lf.close()
- except Exception:
- continue
-
-def unlockfile(lf):
- """
- Unlock a file locked using lockfile()
- """
- try:
- # If we had a shared lock, we need to promote to exclusive before
- # removing the lockfile. Attempt this, ignore failures.
- fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
- os.unlink(lf.name)
- except (IOError, OSError):
- pass
- fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
- lf.close()
-
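The two functions are meant to be used as a pair; a minimal sketch, assuming
bb.utils is importable and /tmp is writable:

import bb.utils

lock = bb.utils.lockfile("/tmp/example.lock")  # blocks until the lock is held
try:
    pass  # critical section protected by the on-disk lock
finally:
    bb.utils.unlockfile(lock)  # promote to exclusive, unlink, unlock, close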
-def md5_file(filename):
- """
- Return the hex string representation of the MD5 checksum of filename.
- """
- try:
- import hashlib
- m = hashlib.md5()
- except ImportError:
- import md5
- m = md5.new()
-
- for line in open(filename):
- m.update(line)
- return m.hexdigest()
-
-def sha256_file(filename):
- """
- Return the hex string representation of the 256-bit SHA checksum of
- filename. On Python 2.4 this will return None, so callers will need to
- handle that by either skipping SHA checks, or running a standalone sha256sum
- binary.
- """
- try:
- import hashlib
- except ImportError:
- return None
-
- s = hashlib.sha256()
- for line in open(filename):
- s.update(line)
- return s.hexdigest()
-
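Both helpers take a path and return a hex digest; a sketch with an arbitrary
example path:

import bb.utils

print(bb.utils.md5_file("/etc/hostname"))     # 32-character hex string
print(bb.utils.sha256_file("/etc/hostname"))  # 64 chars, or None w/o hashlib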
-def preserved_envvars_exported():
- """Variables which are taken from the environment and placed in and exported
- from the metadata"""
- return [
- 'BB_TASKHASH',
- 'HOME',
- 'LOGNAME',
- 'PATH',
- 'PWD',
- 'SHELL',
- 'TERM',
- 'USER',
- 'USERNAME',
- ]
-
-def preserved_envvars_exported_interactive():
- """Variables which are taken from the environment and placed in and exported
- from the metadata, for interactive tasks"""
- return [
- 'COLORTERM',
- 'DBUS_SESSION_BUS_ADDRESS',
- 'DESKTOP_SESSION',
- 'DESKTOP_STARTUP_ID',
- 'DISPLAY',
- 'GNOME_KEYRING_PID',
- 'GNOME_KEYRING_SOCKET',
- 'GPG_AGENT_INFO',
- 'GTK_RC_FILES',
- 'SESSION_MANAGER',
- 'KRB5CCNAME',
- 'SSH_AUTH_SOCK',
- 'XAUTHORITY',
- 'XDG_DATA_DIRS',
- 'XDG_SESSION_COOKIE',
- ]
-
-def preserved_envvars():
- """Variables which are taken from the environment and placed in the metadata"""
- v = [
- 'BBPATH',
- 'BB_PRESERVE_ENV',
- 'BB_ENV_WHITELIST',
- 'BB_ENV_EXTRAWHITE',
- 'LANG',
- '_',
- ]
- return v + preserved_envvars_exported() + preserved_envvars_exported_interactive()
-
-def filter_environment(good_vars):
- """
- Create a pristine environment for bitbake. This will remove variables that
- are not known and may influence the build in a negative way.
- """
-
- removed_vars = []
- for key in os.environ.keys():
- if key in good_vars:
- continue
-
- removed_vars.append(key)
- os.unsetenv(key)
- del os.environ[key]
-
- if len(removed_vars):
- logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars))
-
- return removed_vars
-
-def create_interactive_env(d):
- for k in preserved_envvars_exported_interactive():
-        # os has no setenv(); assign through os.environ so the change sticks
-        os.environ[k] = bb.data.getVar(k, d, True) or ""
-
-def clean_environment():
- """
- Clean up any spurious environment variables. This will remove any
-    variables the user hasn't chosen to preserve.
- """
- if 'BB_PRESERVE_ENV' not in os.environ:
- if 'BB_ENV_WHITELIST' in os.environ:
- good_vars = os.environ['BB_ENV_WHITELIST'].split()
- else:
- good_vars = preserved_envvars()
- if 'BB_ENV_EXTRAWHITE' in os.environ:
- good_vars.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
- filter_environment(good_vars)
-
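A sketch of the whitelist behaviour (the proxy variables are examples only):

import os
import bb.utils

os.environ["BB_ENV_EXTRAWHITE"] = "http_proxy https_proxy"
bb.utils.clean_environment()
# Only preserved_envvars() plus the two proxy variables now survive.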
-def empty_environment():
- """
- Remove all variables from the environment.
- """
- for s in os.environ.keys():
- os.unsetenv(s)
- del os.environ[s]
-
-def build_environment(d):
- """
- Build an environment from all exported variables.
- """
- import bb.data
- for var in bb.data.keys(d):
- export = bb.data.getVarFlag(var, "export", d)
- if export:
- os.environ[var] = bb.data.getVar(var, d, True) or ""
-
-def remove(path, recurse=False):
- """Equivalent to rm -f or rm -rf"""
- if not path:
- return
- import os, errno, shutil, glob
- for name in glob.glob(path):
- try:
- os.unlink(name)
- except OSError as exc:
- if recurse and exc.errno == errno.EISDIR:
- shutil.rmtree(name)
- elif exc.errno != errno.ENOENT:
- raise
-
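Hypothetical calls showing both modes (the paths are placeholders):

import bb.utils

bb.utils.remove("tmp-example/*.o")            # rm -f over a glob pattern
bb.utils.remove("tmp-example", recurse=True)  # rm -rf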
-def prunedir(topdir):
- # Delete everything reachable from the directory named in 'topdir'.
- # CAUTION: This is dangerous!
- for root, dirs, files in os.walk(topdir, topdown = False):
- for name in files:
- os.remove(os.path.join(root, name))
- for name in dirs:
- if os.path.islink(os.path.join(root, name)):
- os.remove(os.path.join(root, name))
- else:
- os.rmdir(os.path.join(root, name))
- os.rmdir(topdir)
-
-#
-# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
-# but that's possibly insane and suffixes is probably going to be small
-#
-def prune_suffix(var, suffixes, d):
- # See if var ends with any of the suffixes listed and
- # remove it if found
- for suffix in suffixes:
-        if var.endswith(suffix):
-            # Strip only the trailing suffix; str.replace() would also
-            # remove any earlier occurrence inside the string.
-            return var[:-len(suffix)]
- return var
-
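For example (the recipe names are illustrative):

import bb.utils

assert bb.utils.prune_suffix("busybox-native", ["-native"], None) == "busybox"
assert bb.utils.prune_suffix("busybox", ["-native"], None) == "busybox"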
-def mkdirhier(directory):
- """Create a directory like 'mkdir -p', but does not complain if
- directory already exists like os.makedirs
- """
-
- try:
- os.makedirs(directory)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise e
-
-def movefile(src, dest, newmtime = None, sstat = None):
- """Moves a file from src to dest, preserving all permissions and
- attributes; mtime will be preserved even when moving across
-    filesystems. Returns the new mtime on success and None on failure.
-    Move is atomic.
- """
-
- #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
- try:
- if not sstat:
- sstat = os.lstat(src)
- except Exception as e:
- print("movefile: Stating source file failed...", e)
- return None
-
- destexists = 1
- try:
- dstat = os.lstat(dest)
- except:
- dstat = os.lstat(os.path.dirname(dest))
- destexists = 0
-
- if destexists:
- if stat.S_ISLNK(dstat[stat.ST_MODE]):
- try:
- os.unlink(dest)
- destexists = 0
- except Exception as e:
- pass
-
- if stat.S_ISLNK(sstat[stat.ST_MODE]):
- try:
- target = os.readlink(src)
- if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
- os.unlink(dest)
- os.symlink(target, dest)
- #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
- os.unlink(src)
- return os.lstat(dest)
- except Exception as e:
- print("movefile: failed to properly create symlink:", dest, "->", target, e)
- return None
-
- renamefailed = 1
- if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
- try:
- os.rename(src, dest)
- renamefailed = 0
- except Exception as e:
-            if e.errno != errno.EXDEV:
- # Some random error.
- print("movefile: Failed to move", src, "to", dest, e)
- return None
- # Invalid cross-device-link 'bind' mounted or actually Cross-Device
-
- if renamefailed:
- didcopy = 0
- if stat.S_ISREG(sstat[stat.ST_MODE]):
- try: # For safety copy then move it over.
- shutil.copyfile(src, dest + "#new")
- os.rename(dest + "#new", dest)
- didcopy = 1
- except Exception as e:
- print('movefile: copy', src, '->', dest, 'failed.', e)
- return None
- else:
- #we don't yet handle special, so we need to fall back to /bin/mv
- a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
- if a[0] != 0:
- print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
- return None # failure
- try:
- if didcopy:
- os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
- os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
- os.unlink(src)
- except Exception as e:
- print("movefile: Failed to chown/chmod/unlink", dest, e)
- return None
-
- if newmtime:
- os.utime(dest, (newmtime, newmtime))
- else:
- os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
- newmtime = sstat[stat.ST_MTIME]
- return newmtime
-
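A hedged usage sketch; the paths are invented:

import bb.utils

mtime = bb.utils.movefile("/tmp/example.src", "/tmp/example.dst")
if mtime is None:
    print("move failed")  # otherwise permissions/ownership/mtime carried over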
-def copyfile(src, dest, newmtime = None, sstat = None):
- """
- Copies a file from src to dest, preserving all permissions and
-    attributes; mtime will be preserved even when copying across
-    filesystems. Returns the new mtime on success and False on failure.
- """
- #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
- try:
- if not sstat:
- sstat = os.lstat(src)
- except Exception as e:
- print("copyfile: Stating source file failed...", e)
- return False
-
- destexists = 1
- try:
- dstat = os.lstat(dest)
- except:
- dstat = os.lstat(os.path.dirname(dest))
- destexists = 0
-
- if destexists:
- if stat.S_ISLNK(dstat[stat.ST_MODE]):
- try:
- os.unlink(dest)
- destexists = 0
- except Exception as e:
- pass
-
- if stat.S_ISLNK(sstat[stat.ST_MODE]):
- try:
- target = os.readlink(src)
- if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
- os.unlink(dest)
- os.symlink(target, dest)
- #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
- return os.lstat(dest)
- except Exception as e:
- print("copyfile: failed to properly create symlink:", dest, "->", target, e)
- return False
-
- if stat.S_ISREG(sstat[stat.ST_MODE]):
- os.chmod(src, stat.S_IRUSR) # Make sure we can read it
- try: # For safety copy then move it over.
- shutil.copyfile(src, dest + "#new")
- os.rename(dest + "#new", dest)
- except Exception as e:
- print('copyfile: copy', src, '->', dest, 'failed.', e)
- return False
- finally:
- os.chmod(src, sstat[stat.ST_MODE])
- os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
-
- else:
- #we don't yet handle special, so we need to fall back to /bin/mv
- a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
- if a[0] != 0:
- print("copyfile: Failed to copy special file:" + src + "' to '" + dest + "'", a)
- return False # failure
- try:
- os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
- os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
- except Exception as e:
- print("copyfile: Failed to chown/chmod/unlink", dest, e)
- return False
-
- if newmtime:
- os.utime(dest, (newmtime, newmtime))
- else:
- os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
- newmtime = sstat[stat.ST_MTIME]
- return newmtime
-
-def which(path, item, direction = 0):
- """
-    Locate 'item' in the colon-separated list of directories 'path'; returns "" when not found
- """
-
- paths = (path or "").split(':')
- if direction != 0:
- paths.reverse()
-
- for p in paths:
- next = os.path.join(p, item)
- if os.path.exists(next):
- return next
-
- return ""
-
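For instance, searching the caller's own PATH:

import os
import bb.utils

print(bb.utils.which(os.environ["PATH"], "sh"))               # first match
print(bb.utils.which(os.environ["PATH"], "sh", direction=1))  # last match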
-def init_logger(logger, verbose, debug, debug_domains):
- """
- Set verbosity and debug levels in the logger
- """
-
- if debug:
- bb.msg.set_debug_level(debug)
- elif verbose:
- bb.msg.set_verbose(True)
- else:
- bb.msg.set_debug_level(0)
-
- if debug_domains:
- bb.msg.set_debug_domains(debug_domains)
-
-def to_boolean(string, default=None):
- if not string:
- return default
-
- normalized = string.lower()
- if normalized in ("y", "yes", "1", "true"):
- return True
- elif normalized in ("n", "no", "0", "false"):
- return False
- else:
- raise ValueError("Invalid value for to_boolean: %s" % string)
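Example conversions:

import bb.utils

assert bb.utils.to_boolean("Yes") is True
assert bb.utils.to_boolean("0") is False
assert bb.utils.to_boolean("", default=True) is True  # empty input -> default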
diff --git a/bitbake/lib/codegen.py b/bitbake/lib/codegen.py
deleted file mode 100644
index be772d5107..0000000000
--- a/bitbake/lib/codegen.py
+++ /dev/null
@@ -1,570 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
- codegen
- ~~~~~~~
-
- Extension to ast that allow ast -> python code generation.
-
- :copyright: Copyright 2008 by Armin Ronacher.
- :license: BSD.
-"""
-from ast import *
-
-BOOLOP_SYMBOLS = {
- And: 'and',
- Or: 'or'
-}
-
-BINOP_SYMBOLS = {
- Add: '+',
- Sub: '-',
- Mult: '*',
- Div: '/',
- FloorDiv: '//',
- Mod: '%',
- LShift: '<<',
- RShift: '>>',
- BitOr: '|',
- BitAnd: '&',
- BitXor: '^'
-}
-
-CMPOP_SYMBOLS = {
- Eq: '==',
- Gt: '>',
- GtE: '>=',
- In: 'in',
- Is: 'is',
- IsNot: 'is not',
- Lt: '<',
- LtE: '<=',
- NotEq: '!=',
- NotIn: 'not in'
-}
-
-UNARYOP_SYMBOLS = {
- Invert: '~',
- Not: 'not',
- UAdd: '+',
- USub: '-'
-}
-
-ALL_SYMBOLS = {}
-ALL_SYMBOLS.update(BOOLOP_SYMBOLS)
-ALL_SYMBOLS.update(BINOP_SYMBOLS)
-ALL_SYMBOLS.update(CMPOP_SYMBOLS)
-ALL_SYMBOLS.update(UNARYOP_SYMBOLS)
-
-def to_source(node, indent_with=' ' * 4, add_line_information=False):
- """This function can convert a node tree back into python sourcecode.
- This is useful for debugging purposes, especially if you're dealing with
- custom asts not generated by python itself.
-
- It could be that the sourcecode is evaluable when the AST itself is not
- compilable / evaluable. The reason for this is that the AST contains some
- more data than regular sourcecode does, which is dropped during
- conversion.
-
- Each level of indentation is replaced with `indent_with`. Per default this
- parameter is equal to four spaces as suggested by PEP 8, but it might be
- adjusted to match the application's styleguide.
-
- If `add_line_information` is set to `True` comments for the line numbers
- of the nodes are added to the output. This can be used to spot wrong line
- number information of statement nodes.
- """
- generator = SourceGenerator(indent_with, add_line_information)
- generator.visit(node)
- return ''.join(generator.result)
-
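A round-trip sketch, assuming this module is importable as 'codegen':

import ast
from codegen import to_source

tree = ast.parse("x = 1\nif x:\n    y = x + 1\n")
print(to_source(tree))  # prints python source regenerated from the AST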
-
-class SourceGenerator(NodeVisitor):
- """This visitor is able to transform a well formed syntax tree into python
- sourcecode. For more details have a look at the docstring of the
- `node_to_source` function.
- """
-
- def __init__(self, indent_with, add_line_information=False):
- self.result = []
- self.indent_with = indent_with
- self.add_line_information = add_line_information
- self.indentation = 0
- self.new_lines = 0
-
- def write(self, x):
- if self.new_lines:
- if self.result:
- self.result.append('\n' * self.new_lines)
- self.result.append(self.indent_with * self.indentation)
- self.new_lines = 0
- self.result.append(x)
-
- def newline(self, node=None, extra=0):
- self.new_lines = max(self.new_lines, 1 + extra)
- if node is not None and self.add_line_information:
- self.write('# line: %s' % node.lineno)
- self.new_lines = 1
-
- def body(self, statements):
-        self.new_lines = max(self.new_lines, 1)  # the attribute is 'new_lines'
- self.indentation += 1
- for stmt in statements:
- self.visit(stmt)
- self.indentation -= 1
-
- def body_or_else(self, node):
- self.body(node.body)
- if node.orelse:
- self.newline()
- self.write('else:')
- self.body(node.orelse)
-
- def signature(self, node):
- want_comma = []
- def write_comma():
- if want_comma:
- self.write(', ')
- else:
- want_comma.append(True)
-
- padding = [None] * (len(node.args) - len(node.defaults))
- for arg, default in zip(node.args, padding + node.defaults):
- write_comma()
- self.visit(arg)
- if default is not None:
- self.write('=')
- self.visit(default)
- if node.vararg is not None:
- write_comma()
- self.write('*' + node.vararg)
- if node.kwarg is not None:
- write_comma()
- self.write('**' + node.kwarg)
-
- def decorators(self, node):
- for decorator in node.decorator_list:
- self.newline(decorator)
- self.write('@')
- self.visit(decorator)
-
- # Statements
-
- def visit_Assign(self, node):
- self.newline(node)
- for idx, target in enumerate(node.targets):
- if idx:
- self.write(', ')
- self.visit(target)
- self.write(' = ')
- self.visit(node.value)
-
- def visit_AugAssign(self, node):
- self.newline(node)
- self.visit(node.target)
- self.write(BINOP_SYMBOLS[type(node.op)] + '=')
- self.visit(node.value)
-
- def visit_ImportFrom(self, node):
- self.newline(node)
-        self.write('from %s%s import ' % ('.' * node.level, node.module or ''))
- for idx, item in enumerate(node.names):
- if idx:
- self.write(', ')
-            self.visit(item)  # aliases are nodes, so visit rather than write
-
- def visit_Import(self, node):
- self.newline(node)
- for item in node.names:
- self.write('import ')
- self.visit(item)
-
- def visit_Expr(self, node):
- self.newline(node)
- self.generic_visit(node)
-
- def visit_FunctionDef(self, node):
- self.newline(extra=1)
- self.decorators(node)
- self.newline(node)
- self.write('def %s(' % node.name)
- self.signature(node.args)
- self.write('):')
- self.body(node.body)
-
- def visit_ClassDef(self, node):
- have_args = []
- def paren_or_comma():
- if have_args:
- self.write(', ')
- else:
- have_args.append(True)
- self.write('(')
-
- self.newline(extra=2)
- self.decorators(node)
- self.newline(node)
- self.write('class %s' % node.name)
- for base in node.bases:
- paren_or_comma()
- self.visit(base)
- # XXX: the if here is used to keep this module compatible
- # with python 2.6.
- if hasattr(node, 'keywords'):
- for keyword in node.keywords:
- paren_or_comma()
- self.write(keyword.arg + '=')
- self.visit(keyword.value)
- if node.starargs is not None:
- paren_or_comma()
- self.write('*')
- self.visit(node.starargs)
- if node.kwargs is not None:
- paren_or_comma()
- self.write('**')
- self.visit(node.kwargs)
- self.write(have_args and '):' or ':')
- self.body(node.body)
-
- def visit_If(self, node):
- self.newline(node)
- self.write('if ')
- self.visit(node.test)
- self.write(':')
- self.body(node.body)
- while True:
- else_ = node.orelse
- if len(else_) == 1 and isinstance(else_[0], If):
- node = else_[0]
- self.newline()
- self.write('elif ')
- self.visit(node.test)
- self.write(':')
- self.body(node.body)
- else:
- self.newline()
- self.write('else:')
- self.body(else_)
- break
-
- def visit_For(self, node):
- self.newline(node)
- self.write('for ')
- self.visit(node.target)
- self.write(' in ')
- self.visit(node.iter)
- self.write(':')
- self.body_or_else(node)
-
- def visit_While(self, node):
- self.newline(node)
- self.write('while ')
- self.visit(node.test)
- self.write(':')
- self.body_or_else(node)
-
- def visit_With(self, node):
- self.newline(node)
- self.write('with ')
- self.visit(node.context_expr)
- if node.optional_vars is not None:
- self.write(' as ')
- self.visit(node.optional_vars)
- self.write(':')
- self.body(node.body)
-
- def visit_Pass(self, node):
- self.newline(node)
- self.write('pass')
-
- def visit_Print(self, node):
- # XXX: python 2.6 only
- self.newline(node)
- self.write('print ')
- want_comma = False
- if node.dest is not None:
- self.write(' >> ')
- self.visit(node.dest)
- want_comma = True
- for value in node.values:
- if want_comma:
- self.write(', ')
- self.visit(value)
- want_comma = True
- if not node.nl:
- self.write(',')
-
- def visit_Delete(self, node):
- self.newline(node)
- self.write('del ')
-        for idx, target in enumerate(node.targets):
- if idx:
- self.write(', ')
- self.visit(target)
-
- def visit_TryExcept(self, node):
- self.newline(node)
- self.write('try:')
- self.body(node.body)
- for handler in node.handlers:
- self.visit(handler)
-
- def visit_TryFinally(self, node):
- self.newline(node)
- self.write('try:')
- self.body(node.body)
- self.newline(node)
- self.write('finally:')
- self.body(node.finalbody)
-
- def visit_Global(self, node):
- self.newline(node)
- self.write('global ' + ', '.join(node.names))
-
- def visit_Nonlocal(self, node):
- self.newline(node)
- self.write('nonlocal ' + ', '.join(node.names))
-
- def visit_Return(self, node):
- self.newline(node)
- self.write('return ')
- self.visit(node.value)
-
- def visit_Break(self, node):
- self.newline(node)
- self.write('break')
-
- def visit_Continue(self, node):
- self.newline(node)
- self.write('continue')
-
- def visit_Raise(self, node):
- # XXX: Python 2.6 / 3.0 compatibility
- self.newline(node)
- self.write('raise')
- if hasattr(node, 'exc') and node.exc is not None:
- self.write(' ')
- self.visit(node.exc)
- if node.cause is not None:
- self.write(' from ')
- self.visit(node.cause)
- elif hasattr(node, 'type') and node.type is not None:
- self.visit(node.type)
- if node.inst is not None:
- self.write(', ')
- self.visit(node.inst)
- if node.tback is not None:
- self.write(', ')
- self.visit(node.tback)
-
- # Expressions
-
- def visit_Attribute(self, node):
- self.visit(node.value)
- self.write('.' + node.attr)
-
- def visit_Call(self, node):
- want_comma = []
- def write_comma():
- if want_comma:
- self.write(', ')
- else:
- want_comma.append(True)
-
- self.visit(node.func)
- self.write('(')
- for arg in node.args:
- write_comma()
- self.visit(arg)
- for keyword in node.keywords:
- write_comma()
- self.write(keyword.arg + '=')
- self.visit(keyword.value)
- if node.starargs is not None:
- write_comma()
- self.write('*')
- self.visit(node.starargs)
- if node.kwargs is not None:
- write_comma()
- self.write('**')
- self.visit(node.kwargs)
- self.write(')')
-
- def visit_Name(self, node):
- self.write(node.id)
-
- def visit_Str(self, node):
- self.write(repr(node.s))
-
- def visit_Bytes(self, node):
- self.write(repr(node.s))
-
- def visit_Num(self, node):
- self.write(repr(node.n))
-
- def visit_Tuple(self, node):
- self.write('(')
- idx = -1
- for idx, item in enumerate(node.elts):
- if idx:
- self.write(', ')
- self.visit(item)
- self.write(idx and ')' or ',)')
-
- def sequence_visit(left, right):
- def visit(self, node):
- self.write(left)
- for idx, item in enumerate(node.elts):
- if idx:
- self.write(', ')
- self.visit(item)
- self.write(right)
- return visit
-
- visit_List = sequence_visit('[', ']')
- visit_Set = sequence_visit('{', '}')
- del sequence_visit
-
- def visit_Dict(self, node):
- self.write('{')
- for idx, (key, value) in enumerate(zip(node.keys, node.values)):
- if idx:
- self.write(', ')
- self.visit(key)
- self.write(': ')
- self.visit(value)
- self.write('}')
-
- def visit_BinOp(self, node):
- self.visit(node.left)
- self.write(' %s ' % BINOP_SYMBOLS[type(node.op)])
- self.visit(node.right)
-
- def visit_BoolOp(self, node):
- self.write('(')
- for idx, value in enumerate(node.values):
- if idx:
- self.write(' %s ' % BOOLOP_SYMBOLS[type(node.op)])
- self.visit(value)
- self.write(')')
-
- def visit_Compare(self, node):
- self.write('(')
-        self.visit(node.left)
-        for op, right in zip(node.ops, node.comparators):
-            self.write(' %s ' % CMPOP_SYMBOLS[type(op)])
- self.visit(right)
- self.write(')')
-
- def visit_UnaryOp(self, node):
- self.write('(')
- op = UNARYOP_SYMBOLS[type(node.op)]
- self.write(op)
- if op == 'not':
- self.write(' ')
- self.visit(node.operand)
- self.write(')')
-
- def visit_Subscript(self, node):
- self.visit(node.value)
- self.write('[')
- self.visit(node.slice)
- self.write(']')
-
- def visit_Slice(self, node):
- if node.lower is not None:
- self.visit(node.lower)
- self.write(':')
- if node.upper is not None:
- self.visit(node.upper)
- if node.step is not None:
- self.write(':')
- if not (isinstance(node.step, Name) and node.step.id == 'None'):
- self.visit(node.step)
-
- def visit_ExtSlice(self, node):
-        for idx, item in enumerate(node.dims):
- if idx:
- self.write(', ')
- self.visit(item)
-
- def visit_Yield(self, node):
- self.write('yield ')
- self.visit(node.value)
-
- def visit_Lambda(self, node):
- self.write('lambda ')
- self.signature(node.args)
- self.write(': ')
- self.visit(node.body)
-
- def visit_Ellipsis(self, node):
- self.write('Ellipsis')
-
- def generator_visit(left, right):
- def visit(self, node):
- self.write(left)
- self.visit(node.elt)
- for comprehension in node.generators:
- self.visit(comprehension)
- self.write(right)
- return visit
-
- visit_ListComp = generator_visit('[', ']')
- visit_GeneratorExp = generator_visit('(', ')')
- visit_SetComp = generator_visit('{', '}')
- del generator_visit
-
- def visit_DictComp(self, node):
- self.write('{')
- self.visit(node.key)
- self.write(': ')
- self.visit(node.value)
- for comprehension in node.generators:
- self.visit(comprehension)
- self.write('}')
-
- def visit_IfExp(self, node):
- self.visit(node.body)
- self.write(' if ')
- self.visit(node.test)
- self.write(' else ')
- self.visit(node.orelse)
-
- def visit_Starred(self, node):
- self.write('*')
- self.visit(node.value)
-
- def visit_Repr(self, node):
- # XXX: python 2.6 only
- self.write('`')
- self.visit(node.value)
- self.write('`')
-
- # Helper Nodes
-
- def visit_alias(self, node):
- self.write(node.name)
- if node.asname is not None:
- self.write(' as ' + node.asname)
-
- def visit_comprehension(self, node):
- self.write(' for ')
- self.visit(node.target)
- self.write(' in ')
- self.visit(node.iter)
- if node.ifs:
- for if_ in node.ifs:
- self.write(' if ')
- self.visit(if_)
-
- def visit_excepthandler(self, node):
- self.newline(node)
- self.write('except')
- if node.type is not None:
- self.write(' ')
- self.visit(node.type)
- if node.name is not None:
- self.write(' as ')
- self.visit(node.name)
- self.write(':')
- self.body(node.body)
diff --git a/bitbake/lib/ply/__init__.py b/bitbake/lib/ply/__init__.py
deleted file mode 100644
index 853a985542..0000000000
--- a/bitbake/lib/ply/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# PLY package
-# Author: David Beazley (dave@dabeaz.com)
-
-__all__ = ['lex','yacc']
diff --git a/bitbake/lib/ply/lex.py b/bitbake/lib/ply/lex.py
deleted file mode 100644
index 267ec100fc..0000000000
--- a/bitbake/lib/ply/lex.py
+++ /dev/null
@@ -1,1058 +0,0 @@
-# -----------------------------------------------------------------------------
-# ply: lex.py
-#
-# Copyright (C) 2001-2009,
-# David M. Beazley (Dabeaz LLC)
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-# * Neither the name of the David Beazley or Dabeaz LLC may be used to
-# endorse or promote products derived from this software without
-# specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# -----------------------------------------------------------------------------
-
-__version__ = "3.3"
-__tabversion__ = "3.2" # Version of table file used
-
-import re, sys, types, copy, os
-
-# This tuple contains known string types
-try:
- # Python 2.6
- StringTypes = (types.StringType, types.UnicodeType)
-except AttributeError:
- # Python 3.0
- StringTypes = (str, bytes)
-
-# Extract the code attribute of a function. Different implementations
-# are for Python 2/3 compatibility.
-
-if sys.version_info[0] < 3:
- def func_code(f):
- return f.func_code
-else:
- def func_code(f):
- return f.__code__
-
-# This regular expression is used to match valid token names
-_is_identifier = re.compile(r'^[a-zA-Z0-9_]+$')
-
-# Exception thrown when invalid token encountered and no default error
-# handler is defined.
-
-class LexError(Exception):
- def __init__(self,message,s):
- self.args = (message,)
- self.text = s
-
-# Token class. This class is used to represent the tokens produced.
-class LexToken(object):
- def __str__(self):
- return "LexToken(%s,%r,%d,%d)" % (self.type,self.value,self.lineno,self.lexpos)
- def __repr__(self):
- return str(self)
-
-# This object is a stand-in for a logging object created by the
-# logging module.
-
-class PlyLogger(object):
- def __init__(self,f):
- self.f = f
- def critical(self,msg,*args,**kwargs):
- self.f.write((msg % args) + "\n")
-
- def warning(self,msg,*args,**kwargs):
- self.f.write("WARNING: "+ (msg % args) + "\n")
-
- def error(self,msg,*args,**kwargs):
- self.f.write("ERROR: " + (msg % args) + "\n")
-
- info = critical
- debug = critical
-
-# Null logger is used when no output is generated. Does nothing.
-class NullLogger(object):
- def __getattribute__(self,name):
- return self
- def __call__(self,*args,**kwargs):
- return self
-
-# -----------------------------------------------------------------------------
-# === Lexing Engine ===
-#
-# The following Lexer class implements the lexer runtime. There are only
-# a few public methods and attributes:
-#
-# input() - Store a new string in the lexer
-# token() - Get the next token
-# clone() - Clone the lexer
-#
-# lineno - Current line number
-# lexpos - Current position in the input string
-# -----------------------------------------------------------------------------
-
-class Lexer:
- def __init__(self):
- self.lexre = None # Master regular expression. This is a list of
- # tuples (re,findex) where re is a compiled
- # regular expression and findex is a list
- # mapping regex group numbers to rules
- self.lexretext = None # Current regular expression strings
- self.lexstatere = {} # Dictionary mapping lexer states to master regexs
- self.lexstateretext = {} # Dictionary mapping lexer states to regex strings
- self.lexstaterenames = {} # Dictionary mapping lexer states to symbol names
- self.lexstate = "INITIAL" # Current lexer state
- self.lexstatestack = [] # Stack of lexer states
- self.lexstateinfo = None # State information
- self.lexstateignore = {} # Dictionary of ignored characters for each state
- self.lexstateerrorf = {} # Dictionary of error functions for each state
- self.lexreflags = 0 # Optional re compile flags
- self.lexdata = None # Actual input data (as a string)
- self.lexpos = 0 # Current position in input text
- self.lexlen = 0 # Length of the input text
- self.lexerrorf = None # Error rule (if any)
- self.lextokens = None # List of valid tokens
- self.lexignore = "" # Ignored characters
- self.lexliterals = "" # Literal characters that can be passed through
- self.lexmodule = None # Module
- self.lineno = 1 # Current line number
- self.lexoptimize = 0 # Optimized mode
-
- def clone(self,object=None):
- c = copy.copy(self)
-
- # If the object parameter has been supplied, it means we are attaching the
- # lexer to a new object. In this case, we have to rebind all methods in
- # the lexstatere and lexstateerrorf tables.
-
- if object:
- newtab = { }
- for key, ritem in self.lexstatere.items():
- newre = []
- for cre, findex in ritem:
- newfindex = []
- for f in findex:
- if not f or not f[0]:
- newfindex.append(f)
- continue
- newfindex.append((getattr(object,f[0].__name__),f[1]))
- newre.append((cre,newfindex))
- newtab[key] = newre
- c.lexstatere = newtab
- c.lexstateerrorf = { }
- for key, ef in self.lexstateerrorf.items():
- c.lexstateerrorf[key] = getattr(object,ef.__name__)
- c.lexmodule = object
- return c
-
- # ------------------------------------------------------------
- # writetab() - Write lexer information to a table file
- # ------------------------------------------------------------
- def writetab(self,tabfile,outputdir=""):
- if isinstance(tabfile,types.ModuleType):
- return
- basetabfilename = tabfile.split(".")[-1]
- filename = os.path.join(outputdir,basetabfilename)+".py"
- tf = open(filename,"w")
- tf.write("# %s.py. This file automatically created by PLY (version %s). Don't edit!\n" % (tabfile,__version__))
- tf.write("_tabversion = %s\n" % repr(__version__))
- tf.write("_lextokens = %s\n" % repr(self.lextokens))
- tf.write("_lexreflags = %s\n" % repr(self.lexreflags))
- tf.write("_lexliterals = %s\n" % repr(self.lexliterals))
- tf.write("_lexstateinfo = %s\n" % repr(self.lexstateinfo))
-
- tabre = { }
- # Collect all functions in the initial state
- initial = self.lexstatere["INITIAL"]
- initialfuncs = []
- for part in initial:
- for f in part[1]:
- if f and f[0]:
- initialfuncs.append(f)
-
- for key, lre in self.lexstatere.items():
- titem = []
- for i in range(len(lre)):
- titem.append((self.lexstateretext[key][i],_funcs_to_names(lre[i][1],self.lexstaterenames[key][i])))
- tabre[key] = titem
-
- tf.write("_lexstatere = %s\n" % repr(tabre))
- tf.write("_lexstateignore = %s\n" % repr(self.lexstateignore))
-
- taberr = { }
- for key, ef in self.lexstateerrorf.items():
- if ef:
- taberr[key] = ef.__name__
- else:
- taberr[key] = None
- tf.write("_lexstateerrorf = %s\n" % repr(taberr))
- tf.close()
-
- # ------------------------------------------------------------
- # readtab() - Read lexer information from a tab file
- # ------------------------------------------------------------
- def readtab(self,tabfile,fdict):
- if isinstance(tabfile,types.ModuleType):
- lextab = tabfile
- else:
- if sys.version_info[0] < 3:
- exec("import %s as lextab" % tabfile)
- else:
- env = { }
- exec("import %s as lextab" % tabfile, env,env)
- lextab = env['lextab']
-
- if getattr(lextab,"_tabversion","0.0") != __version__:
- raise ImportError("Inconsistent PLY version")
-
- self.lextokens = lextab._lextokens
- self.lexreflags = lextab._lexreflags
- self.lexliterals = lextab._lexliterals
- self.lexstateinfo = lextab._lexstateinfo
- self.lexstateignore = lextab._lexstateignore
- self.lexstatere = { }
- self.lexstateretext = { }
- for key,lre in lextab._lexstatere.items():
- titem = []
- txtitem = []
- for i in range(len(lre)):
- titem.append((re.compile(lre[i][0],lextab._lexreflags | re.VERBOSE),_names_to_funcs(lre[i][1],fdict)))
- txtitem.append(lre[i][0])
- self.lexstatere[key] = titem
- self.lexstateretext[key] = txtitem
- self.lexstateerrorf = { }
- for key,ef in lextab._lexstateerrorf.items():
- self.lexstateerrorf[key] = fdict[ef]
- self.begin('INITIAL')
-
- # ------------------------------------------------------------
- # input() - Push a new string into the lexer
- # ------------------------------------------------------------
- def input(self,s):
- # Pull off the first character to see if s looks like a string
- c = s[:1]
- if not isinstance(c,StringTypes):
- raise ValueError("Expected a string")
- self.lexdata = s
- self.lexpos = 0
- self.lexlen = len(s)
-
- # ------------------------------------------------------------
- # begin() - Changes the lexing state
- # ------------------------------------------------------------
- def begin(self,state):
- if not state in self.lexstatere:
- raise ValueError("Undefined state")
- self.lexre = self.lexstatere[state]
- self.lexretext = self.lexstateretext[state]
- self.lexignore = self.lexstateignore.get(state,"")
- self.lexerrorf = self.lexstateerrorf.get(state,None)
- self.lexstate = state
-
- # ------------------------------------------------------------
- # push_state() - Changes the lexing state and saves old on stack
- # ------------------------------------------------------------
- def push_state(self,state):
- self.lexstatestack.append(self.lexstate)
- self.begin(state)
-
- # ------------------------------------------------------------
- # pop_state() - Restores the previous state
- # ------------------------------------------------------------
- def pop_state(self):
- self.begin(self.lexstatestack.pop())
-
- # ------------------------------------------------------------
- # current_state() - Returns the current lexing state
- # ------------------------------------------------------------
- def current_state(self):
- return self.lexstate
-
- # ------------------------------------------------------------
- # skip() - Skip ahead n characters
- # ------------------------------------------------------------
- def skip(self,n):
- self.lexpos += n
-
- # ------------------------------------------------------------
- # opttoken() - Return the next token from the Lexer
- #
- # Note: This function has been carefully implemented to be as fast
- # as possible. Don't make changes unless you really know what
- # you are doing
- # ------------------------------------------------------------
- def token(self):
- # Make local copies of frequently referenced attributes
- lexpos = self.lexpos
- lexlen = self.lexlen
- lexignore = self.lexignore
- lexdata = self.lexdata
-
- while lexpos < lexlen:
- # This code provides some short-circuit code for whitespace, tabs, and other ignored characters
- if lexdata[lexpos] in lexignore:
- lexpos += 1
- continue
-
- # Look for a regular expression match
- for lexre,lexindexfunc in self.lexre:
- m = lexre.match(lexdata,lexpos)
- if not m: continue
-
- # Create a token for return
- tok = LexToken()
- tok.value = m.group()
- tok.lineno = self.lineno
- tok.lexpos = lexpos
-
- i = m.lastindex
- func,tok.type = lexindexfunc[i]
-
- if not func:
- # If no token type was set, it's an ignored token
- if tok.type:
- self.lexpos = m.end()
- return tok
- else:
- lexpos = m.end()
- break
-
- lexpos = m.end()
-
- # If token is processed by a function, call it
-
- tok.lexer = self # Set additional attributes useful in token rules
- self.lexmatch = m
- self.lexpos = lexpos
-
- newtok = func(tok)
-
-                    # Every function must return a token; if it returns nothing, we just move to the next token
- if not newtok:
- lexpos = self.lexpos # This is here in case user has updated lexpos.
- lexignore = self.lexignore # This is here in case there was a state change
- break
-
- # Verify type of the token. If not in the token map, raise an error
- if not self.lexoptimize:
- if not newtok.type in self.lextokens:
- raise LexError("%s:%d: Rule '%s' returned an unknown token type '%s'" % (
- func_code(func).co_filename, func_code(func).co_firstlineno,
- func.__name__, newtok.type),lexdata[lexpos:])
-
- return newtok
- else:
- # No match, see if in literals
- if lexdata[lexpos] in self.lexliterals:
- tok = LexToken()
- tok.value = lexdata[lexpos]
- tok.lineno = self.lineno
- tok.type = tok.value
- tok.lexpos = lexpos
- self.lexpos = lexpos + 1
- return tok
-
- # No match. Call t_error() if defined.
- if self.lexerrorf:
- tok = LexToken()
- tok.value = self.lexdata[lexpos:]
- tok.lineno = self.lineno
- tok.type = "error"
- tok.lexer = self
- tok.lexpos = lexpos
- self.lexpos = lexpos
- newtok = self.lexerrorf(tok)
- if lexpos == self.lexpos:
- # Error method didn't change text position at all. This is an error.
- raise LexError("Scanning error. Illegal character '%s'" % (lexdata[lexpos]), lexdata[lexpos:])
- lexpos = self.lexpos
- if not newtok: continue
- return newtok
-
- self.lexpos = lexpos
- raise LexError("Illegal character '%s' at index %d" % (lexdata[lexpos],lexpos), lexdata[lexpos:])
-
- self.lexpos = lexpos + 1
- if self.lexdata is None:
- raise RuntimeError("No input string given with input()")
- return None
-
- # Iterator interface
- def __iter__(self):
- return self
-
- def next(self):
- t = self.token()
- if t is None:
- raise StopIteration
- return t
-
- __next__ = next
-
-# -----------------------------------------------------------------------------
-# ==== Lex Builder ===
-#
-# The functions and classes below are used to collect lexing information
-# and build a Lexer object from it.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# get_caller_module_dict()
-#
-# This function returns a dictionary containing all of the symbols defined within
-# a caller further down the call stack. This is used to get the environment
-# associated with the yacc() call if none was provided.
-# -----------------------------------------------------------------------------
-
-def get_caller_module_dict(levels):
- try:
- raise RuntimeError
- except RuntimeError:
- e,b,t = sys.exc_info()
- f = t.tb_frame
- while levels > 0:
- f = f.f_back
- levels -= 1
- ldict = f.f_globals.copy()
- if f.f_globals != f.f_locals:
- ldict.update(f.f_locals)
-
- return ldict
-
-# -----------------------------------------------------------------------------
-# _funcs_to_names()
-#
-# Given a list of regular expression functions, this converts it to a list
-# suitable for output to a table file
-# -----------------------------------------------------------------------------
-
-def _funcs_to_names(funclist,namelist):
- result = []
- for f,name in zip(funclist,namelist):
- if f and f[0]:
- result.append((name, f[1]))
- else:
- result.append(f)
- return result
-
-# -----------------------------------------------------------------------------
-# _names_to_funcs()
-#
-# Given a list of regular expression function names, this converts it back to
-# functions.
-# -----------------------------------------------------------------------------
-
-def _names_to_funcs(namelist,fdict):
- result = []
- for n in namelist:
- if n and n[0]:
- result.append((fdict[n[0]],n[1]))
- else:
- result.append(n)
- return result
-
-# -----------------------------------------------------------------------------
-# _form_master_re()
-#
-# This function takes a list of all of the regex components and attempts to
-# form the master regular expression. Given limitations in the Python re
-# module, it may be necessary to break the master regex into separate expressions.
-# -----------------------------------------------------------------------------
-
-def _form_master_re(relist,reflags,ldict,toknames):
- if not relist: return []
- regex = "|".join(relist)
- try:
- lexre = re.compile(regex,re.VERBOSE | reflags)
-
- # Build the index to function map for the matching engine
- lexindexfunc = [ None ] * (max(lexre.groupindex.values())+1)
- lexindexnames = lexindexfunc[:]
-
- for f,i in lexre.groupindex.items():
- handle = ldict.get(f,None)
- if type(handle) in (types.FunctionType, types.MethodType):
- lexindexfunc[i] = (handle,toknames[f])
- lexindexnames[i] = f
- elif handle is not None:
- lexindexnames[i] = f
- if f.find("ignore_") > 0:
- lexindexfunc[i] = (None,None)
- else:
- lexindexfunc[i] = (None, toknames[f])
-
- return [(lexre,lexindexfunc)],[regex],[lexindexnames]
- except Exception:
- m = int(len(relist)/2)
- if m == 0: m = 1
- llist, lre, lnames = _form_master_re(relist[:m],reflags,ldict,toknames)
- rlist, rre, rnames = _form_master_re(relist[m:],reflags,ldict,toknames)
- return llist+rlist, lre+rre, lnames+rnames
-
-# -----------------------------------------------------------------------------
-# def _statetoken(s,names)
-#
-# Given a declaration name s of the form "t_" and a dictionary whose keys are
-# state names, this function returns a tuple (states,tokenname) where states
-# is a tuple of state names and tokenname is the name of the token. For example,
-# calling this with s = "t_foo_bar_SPAM" might return (('foo','bar'),'SPAM')
-# -----------------------------------------------------------------------------
-
-def _statetoken(s,names):
- nonstate = 1
- parts = s.split("_")
- for i in range(1,len(parts)):
- if not parts[i] in names and parts[i] != 'ANY': break
- if i > 1:
- states = tuple(parts[1:i])
- else:
- states = ('INITIAL',)
-
- if 'ANY' in states:
- states = tuple(names)
-
- tokenname = "_".join(parts[i:])
- return (states,tokenname)
-
-
-# -----------------------------------------------------------------------------
-# LexerReflect()
-#
-# This class represents information needed to build a lexer as extracted from a
-# user's input file.
-# -----------------------------------------------------------------------------
-class LexerReflect(object):
- def __init__(self,ldict,log=None,reflags=0):
- self.ldict = ldict
- self.error_func = None
- self.tokens = []
- self.reflags = reflags
- self.stateinfo = { 'INITIAL' : 'inclusive'}
- self.files = {}
- self.error = 0
-
- if log is None:
- self.log = PlyLogger(sys.stderr)
- else:
- self.log = log
-
- # Get all of the basic information
- def get_all(self):
- self.get_tokens()
- self.get_literals()
- self.get_states()
- self.get_rules()
-
- # Validate all of the information
- def validate_all(self):
- self.validate_tokens()
- self.validate_literals()
- self.validate_rules()
- return self.error
-
- # Get the tokens map
- def get_tokens(self):
- tokens = self.ldict.get("tokens",None)
- if not tokens:
- self.log.error("No token list is defined")
- self.error = 1
- return
-
- if not isinstance(tokens,(list, tuple)):
- self.log.error("tokens must be a list or tuple")
- self.error = 1
- return
-
- if not tokens:
- self.log.error("tokens is empty")
- self.error = 1
- return
-
- self.tokens = tokens
-
- # Validate the tokens
- def validate_tokens(self):
- terminals = {}
- for n in self.tokens:
- if not _is_identifier.match(n):
- self.log.error("Bad token name '%s'",n)
- self.error = 1
- if n in terminals:
- self.log.warning("Token '%s' multiply defined", n)
- terminals[n] = 1
-
- # Get the literals specifier
- def get_literals(self):
- self.literals = self.ldict.get("literals","")
-
- # Validate literals
- def validate_literals(self):
- try:
- for c in self.literals:
- if not isinstance(c,StringTypes) or len(c) > 1:
- self.log.error("Invalid literal %s. Must be a single character", repr(c))
- self.error = 1
- continue
-
- except TypeError:
- self.log.error("Invalid literals specification. literals must be a sequence of characters")
- self.error = 1
-
- def get_states(self):
- self.states = self.ldict.get("states",None)
- # Build statemap
- if self.states:
- if not isinstance(self.states,(tuple,list)):
- self.log.error("states must be defined as a tuple or list")
- self.error = 1
- else:
- for s in self.states:
- if not isinstance(s,tuple) or len(s) != 2:
- self.log.error("Invalid state specifier %s. Must be a tuple (statename,'exclusive|inclusive')",repr(s))
- self.error = 1
- continue
- name, statetype = s
- if not isinstance(name,StringTypes):
- self.log.error("State name %s must be a string", repr(name))
- self.error = 1
- continue
- if not (statetype == 'inclusive' or statetype == 'exclusive'):
- self.log.error("State type for state %s must be 'inclusive' or 'exclusive'",name)
- self.error = 1
- continue
- if name in self.stateinfo:
- self.log.error("State '%s' already defined",name)
- self.error = 1
- continue
- self.stateinfo[name] = statetype
-
- # Get all of the symbols with a t_ prefix and sort them into various
- # categories (functions, strings, error functions, and ignore characters)
-
- def get_rules(self):
- tsymbols = [f for f in self.ldict if f[:2] == 't_' ]
-
- # Now build up a list of functions and a list of strings
-
- self.toknames = { } # Mapping of symbols to token names
- self.funcsym = { } # Symbols defined as functions
- self.strsym = { } # Symbols defined as strings
- self.ignore = { } # Ignore strings by state
- self.errorf = { } # Error functions by state
-
- for s in self.stateinfo:
- self.funcsym[s] = []
- self.strsym[s] = []
-
- if len(tsymbols) == 0:
- self.log.error("No rules of the form t_rulename are defined")
- self.error = 1
- return
-
- for f in tsymbols:
- t = self.ldict[f]
- states, tokname = _statetoken(f,self.stateinfo)
- self.toknames[f] = tokname
-
- if hasattr(t,"__call__"):
- if tokname == 'error':
- for s in states:
- self.errorf[s] = t
- elif tokname == 'ignore':
- line = func_code(t).co_firstlineno
- file = func_code(t).co_filename
- self.log.error("%s:%d: Rule '%s' must be defined as a string",file,line,t.__name__)
- self.error = 1
- else:
- for s in states:
- self.funcsym[s].append((f,t))
- elif isinstance(t, StringTypes):
- if tokname == 'ignore':
- for s in states:
- self.ignore[s] = t
- if "\\" in t:
- self.log.warning("%s contains a literal backslash '\\'",f)
-
- elif tokname == 'error':
- self.log.error("Rule '%s' must be defined as a function", f)
- self.error = 1
- else:
- for s in states:
- self.strsym[s].append((f,t))
- else:
- self.log.error("%s not defined as a function or string", f)
- self.error = 1
-
- # Sort the functions by line number
- for f in self.funcsym.values():
- if sys.version_info[0] < 3:
- f.sort(lambda x,y: cmp(func_code(x[1]).co_firstlineno,func_code(y[1]).co_firstlineno))
- else:
- # Python 3.0
- f.sort(key=lambda x: func_code(x[1]).co_firstlineno)
-
- # Sort the strings by regular expression length
- for s in self.strsym.values():
- if sys.version_info[0] < 3:
- s.sort(lambda x,y: (len(x[1]) < len(y[1])) - (len(x[1]) > len(y[1])))
- else:
- # Python 3.0
- s.sort(key=lambda x: len(x[1]),reverse=True)
-
- # Validate all of the t_rules collected
- def validate_rules(self):
- for state in self.stateinfo:
-            # Validate all rules defined by functions
-
- for fname, f in self.funcsym[state]:
- line = func_code(f).co_firstlineno
- file = func_code(f).co_filename
- self.files[file] = 1
-
- tokname = self.toknames[fname]
- if isinstance(f, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- nargs = func_code(f).co_argcount
- if nargs > reqargs:
- self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__)
- self.error = 1
- continue
-
- if nargs < reqargs:
- self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__)
- self.error = 1
- continue
-
- if not f.__doc__:
- self.log.error("%s:%d: No regular expression defined for rule '%s'",file,line,f.__name__)
- self.error = 1
- continue
-
- try:
- c = re.compile("(?P<%s>%s)" % (fname,f.__doc__), re.VERBOSE | self.reflags)
- if c.match(""):
- self.log.error("%s:%d: Regular expression for rule '%s' matches empty string", file,line,f.__name__)
- self.error = 1
- except re.error:
- _etype, e, _etrace = sys.exc_info()
- self.log.error("%s:%d: Invalid regular expression for rule '%s'. %s", file,line,f.__name__,e)
- if '#' in f.__doc__:
- self.log.error("%s:%d. Make sure '#' in rule '%s' is escaped with '\\#'",file,line, f.__name__)
- self.error = 1
-
- # Validate all rules defined by strings
- for name,r in self.strsym[state]:
- tokname = self.toknames[name]
- if tokname == 'error':
- self.log.error("Rule '%s' must be defined as a function", name)
- self.error = 1
- continue
-
- if not tokname in self.tokens and tokname.find("ignore_") < 0:
- self.log.error("Rule '%s' defined for an unspecified token %s",name,tokname)
- self.error = 1
- continue
-
- try:
- c = re.compile("(?P<%s>%s)" % (name,r),re.VERBOSE | self.reflags)
- if (c.match("")):
- self.log.error("Regular expression for rule '%s' matches empty string",name)
- self.error = 1
- except re.error:
- _etype, e, _etrace = sys.exc_info()
- self.log.error("Invalid regular expression for rule '%s'. %s",name,e)
- if '#' in r:
- self.log.error("Make sure '#' in rule '%s' is escaped with '\\#'",name)
- self.error = 1
-
- if not self.funcsym[state] and not self.strsym[state]:
- self.log.error("No rules defined for state '%s'",state)
- self.error = 1
-
- # Validate the error function
- efunc = self.errorf.get(state,None)
- if efunc:
- f = efunc
- line = func_code(f).co_firstlineno
- file = func_code(f).co_filename
- self.files[file] = 1
-
- if isinstance(f, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- nargs = func_code(f).co_argcount
- if nargs > reqargs:
- self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,f.__name__)
- self.error = 1
-
- if nargs < reqargs:
- self.log.error("%s:%d: Rule '%s' requires an argument", file,line,f.__name__)
- self.error = 1
-
- for f in self.files:
- self.validate_file(f)
-
-
- # -----------------------------------------------------------------------------
- # validate_file()
- #
- # This checks to see if there are duplicated t_rulename() functions or strings
- # in the parser input file. This is done using a simple regular expression
- # match on each line in the given file.
- # -----------------------------------------------------------------------------
-
- def validate_file(self,filename):
- import os.path
- base,ext = os.path.splitext(filename)
- if ext != '.py': return # No idea what the file is. Return OK
-
- try:
- f = open(filename)
- lines = f.readlines()
- f.close()
- except IOError:
- return # Couldn't find the file. Don't worry about it
-
- fre = re.compile(r'\s*def\s+(t_[a-zA-Z_0-9]*)\(')
- sre = re.compile(r'\s*(t_[a-zA-Z_0-9]*)\s*=')
-
- counthash = { }
- linen = 1
- for l in lines:
- m = fre.match(l)
- if not m:
- m = sre.match(l)
- if m:
- name = m.group(1)
- prev = counthash.get(name)
- if not prev:
- counthash[name] = linen
- else:
- self.log.error("%s:%d: Rule %s redefined. Previously defined on line %d",filename,linen,name,prev)
- self.error = 1
- linen += 1
-
-# -----------------------------------------------------------------------------
-# lex(module)
-#
-# Build all of the regular expression rules from definitions in the supplied module
-# -----------------------------------------------------------------------------
-def lex(module=None,object=None,debug=0,optimize=0,lextab="lextab",reflags=0,nowarn=0,outputdir="", debuglog=None, errorlog=None):
- global lexer
- ldict = None
- stateinfo = { 'INITIAL' : 'inclusive'}
- lexobj = Lexer()
- lexobj.lexoptimize = optimize
- global token,input
-
- if errorlog is None:
- errorlog = PlyLogger(sys.stderr)
-
- if debug:
- if debuglog is None:
- debuglog = PlyLogger(sys.stderr)
-
- # Get the module dictionary used for the lexer
- if object: module = object
-
- if module:
- _items = [(k,getattr(module,k)) for k in dir(module)]
- ldict = dict(_items)
- else:
- ldict = get_caller_module_dict(2)
-
- # Collect parser information from the dictionary
- linfo = LexerReflect(ldict,log=errorlog,reflags=reflags)
- linfo.get_all()
- if not optimize:
- if linfo.validate_all():
- raise SyntaxError("Can't build lexer")
-
- if optimize and lextab:
- try:
- lexobj.readtab(lextab,ldict)
- token = lexobj.token
- input = lexobj.input
- lexer = lexobj
- return lexobj
-
- except ImportError:
- pass
-
- # Dump some basic debugging information
- if debug:
- debuglog.info("lex: tokens = %r", linfo.tokens)
- debuglog.info("lex: literals = %r", linfo.literals)
- debuglog.info("lex: states = %r", linfo.stateinfo)
-
- # Build a dictionary of valid token names
- lexobj.lextokens = { }
- for n in linfo.tokens:
- lexobj.lextokens[n] = 1
-
- # Get literals specification
- if isinstance(linfo.literals,(list,tuple)):
- lexobj.lexliterals = type(linfo.literals[0])().join(linfo.literals)
- else:
- lexobj.lexliterals = linfo.literals
-
- # Get the stateinfo dictionary
- stateinfo = linfo.stateinfo
-
- regexs = { }
- # Build the master regular expressions
- for state in stateinfo:
- regex_list = []
-
- # Add rules defined by functions first
- for fname, f in linfo.funcsym[state]:
- line = func_code(f).co_firstlineno
- file = func_code(f).co_filename
- regex_list.append("(?P<%s>%s)" % (fname,f.__doc__))
- if debug:
- debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",fname,f.__doc__, state)
-
- # Now add all of the simple rules
- for name,r in linfo.strsym[state]:
- regex_list.append("(?P<%s>%s)" % (name,r))
- if debug:
- debuglog.info("lex: Adding rule %s -> '%s' (state '%s')",name,r, state)
-
- regexs[state] = regex_list
-
- # Build the master regular expressions
-
- if debug:
- debuglog.info("lex: ==== MASTER REGEXS FOLLOW ====")
-
- for state in regexs:
- lexre, re_text, re_names = _form_master_re(regexs[state],reflags,ldict,linfo.toknames)
- lexobj.lexstatere[state] = lexre
- lexobj.lexstateretext[state] = re_text
- lexobj.lexstaterenames[state] = re_names
- if debug:
- for i in range(len(re_text)):
- debuglog.info("lex: state '%s' : regex[%d] = '%s'",state, i, re_text[i])
-
- # For inclusive states, we need to add the regular expressions from the INITIAL state
- for state,stype in stateinfo.items():
- if state != "INITIAL" and stype == 'inclusive':
- lexobj.lexstatere[state].extend(lexobj.lexstatere['INITIAL'])
- lexobj.lexstateretext[state].extend(lexobj.lexstateretext['INITIAL'])
- lexobj.lexstaterenames[state].extend(lexobj.lexstaterenames['INITIAL'])
-
- lexobj.lexstateinfo = stateinfo
- lexobj.lexre = lexobj.lexstatere["INITIAL"]
- lexobj.lexretext = lexobj.lexstateretext["INITIAL"]
- lexobj.lexreflags = reflags
-
- # Set up ignore variables
- lexobj.lexstateignore = linfo.ignore
- lexobj.lexignore = lexobj.lexstateignore.get("INITIAL","")
-
- # Set up error functions
- lexobj.lexstateerrorf = linfo.errorf
- lexobj.lexerrorf = linfo.errorf.get("INITIAL",None)
- if not lexobj.lexerrorf:
- errorlog.warning("No t_error rule is defined")
-
- # Check state information for ignore and error rules
- for s,stype in stateinfo.items():
- if stype == 'exclusive':
- if not s in linfo.errorf:
- errorlog.warning("No error rule is defined for exclusive state '%s'", s)
- if not s in linfo.ignore and lexobj.lexignore:
- errorlog.warning("No ignore rule is defined for exclusive state '%s'", s)
- elif stype == 'inclusive':
- if not s in linfo.errorf:
- linfo.errorf[s] = linfo.errorf.get("INITIAL",None)
- if not s in linfo.ignore:
- linfo.ignore[s] = linfo.ignore.get("INITIAL","")
-
- # Create global versions of the token() and input() functions
- token = lexobj.token
- input = lexobj.input
- lexer = lexobj
-
- # If in optimize mode, we write the lextab
- if lextab and optimize:
- lexobj.writetab(lextab,outputdir)
-
- return lexobj
-
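A minimal end-to-end lexer built with lex(), using an invented two-token
grammar (it assumes this module is importable as ply.lex):

import ply.lex as plylex

tokens = ("NUMBER", "PLUS")   # token names referenced by the t_ rules below
t_PLUS = r'\+'
t_ignore = ' \t'

def t_NUMBER(t):
    r'\d+'
    t.value = int(t.value)
    return t

def t_error(t):
    t.lexer.skip(1)           # skip offending characters instead of raising

lexer = plylex.lex()
lexer.input("1 + 22")
for tok in lexer:
    print(tok.type, tok.value)  # NUMBER 1, PLUS '+', NUMBER 22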
-# -----------------------------------------------------------------------------
-# runmain()
-#
-# This runs the lexer as a main program
-# -----------------------------------------------------------------------------
-
-def runmain(lexer=None,data=None):
- if not data:
- try:
- filename = sys.argv[1]
- f = open(filename)
- data = f.read()
- f.close()
- except IndexError:
- sys.stdout.write("Reading from standard input (type EOF to end):\n")
- data = sys.stdin.read()
-
- if lexer:
- _input = lexer.input
- else:
- _input = input
- _input(data)
- if lexer:
- _token = lexer.token
- else:
- _token = token
-
- while 1:
- tok = _token()
- if not tok: break
- sys.stdout.write("(%s,%r,%d,%d)\n" % (tok.type, tok.value, tok.lineno,tok.lexpos))
-
-# -----------------------------------------------------------------------------
-# @TOKEN(regex)
-#
-# This decorator function can be used to set the regular expression of a function
-# when its docstring might need to be set in an alternative way
-# -----------------------------------------------------------------------------
-
-def TOKEN(r):
- def set_doc(f):
- if hasattr(r,"__call__"):
- f.__doc__ = r.__doc__
- else:
- f.__doc__ = r
- return f
- return set_doc
-
-# Alternative spelling of the TOKEN decorator
-Token = TOKEN
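-
-# Usage sketch (illustrative): @TOKEN lets a rule's regular expression be
-# computed at runtime, since a docstring cannot be built from an expression:
-#
-#     digit      = r'([0-9])'
-#     nondigit   = r'([_A-Za-z])'
-#     identifier = r'(' + nondigit + r'(' + digit + r'|' + nondigit + r')*)'
-#
-#     @TOKEN(identifier)
-#     def t_ID(t):
-#         return t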
-
diff --git a/bitbake/lib/ply/yacc.py b/bitbake/lib/ply/yacc.py
deleted file mode 100644
index 6168fd9a03..0000000000
--- a/bitbake/lib/ply/yacc.py
+++ /dev/null
@@ -1,3276 +0,0 @@
-# -----------------------------------------------------------------------------
-# ply: yacc.py
-#
-# Copyright (C) 2001-2009,
-# David M. Beazley (Dabeaz LLC)
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright notice,
-# this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above copyright notice,
-# this list of conditions and the following disclaimer in the documentation
-# and/or other materials provided with the distribution.
-# * Neither the name of David Beazley nor Dabeaz LLC may be used to
-# endorse or promote products derived from this software without
-# specific prior written permission.
-#
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-# -----------------------------------------------------------------------------
-#
-# This implements an LR parser that is constructed from grammar rules defined
-# as Python functions. The grammar is specified by supplying the BNF inside
-# Python documentation strings. The inspiration for this technique was borrowed
-# from John Aycock's Spark parsing system. PLY might be viewed as a cross between
-# Spark and the GNU bison utility.
-#
-# The current implementation is only somewhat object-oriented. The
-# LR parser itself is defined in terms of an object (which allows multiple
-# parsers to co-exist). However, most of the variables used during table
-# construction are defined in terms of global variables. Users shouldn't
-# notice unless they are trying to define multiple parsers at the same
-# time using threads (in which case they should have their head examined).
-#
-# This implementation supports both SLR and LALR(1) parsing. LALR(1)
-# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
-# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
-# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
-# by the more efficient DeRemer and Pennello algorithm.
-#
-# :::::::: WARNING :::::::
-#
-# Construction of LR parsing tables is fairly complicated and expensive.
-# To make this module run fast, a *LOT* of work has been put into
-# optimization, often at the expense of readability and what one might
-# consider to be good Python "coding style." Modify the code at your
-# own risk!
-# ----------------------------------------------------------------------------
-
-__version__ = "3.3"
-__tabversion__ = "3.2" # Table version
-
-#-----------------------------------------------------------------------------
-# === User configurable parameters ===
-#
-# Change these to modify the default behavior of yacc (if you wish)
-#-----------------------------------------------------------------------------
-
-yaccdebug = 0 # Debugging mode. If set, yacc generates
- # a 'parser.out' file in the current directory
-
-debug_file = 'parser.out' # Default name of the debugging file
-tab_module = 'parsetab' # Default name of the table module
-default_lr = 'LALR' # Default LR table generation method
-
-error_count = 3 # Number of symbols that must be shifted to leave recovery mode
-
-yaccdevel = 0 # Set to True if developing yacc. This turns off optimized
- # implementations of certain functions.
-
-resultlimit = 40 # Size limit of results when running in debug mode.
-
-pickle_protocol = 0 # Protocol to use when writing pickle files
-
-import re, types, sys, os.path
-
-# Compatibility function for python 2.6/3.0
-if sys.version_info[0] < 3:
- def func_code(f):
- return f.func_code
-else:
- def func_code(f):
- return f.__code__
-
-# Compatibility
-try:
- MAXINT = sys.maxint
-except AttributeError:
- MAXINT = sys.maxsize
-
-# Python 2.x/3.0 compatibility.
-def load_ply_lex():
- if sys.version_info[0] < 3:
- import lex
- else:
- import ply.lex as lex
- return lex
-
-# This object is a stand-in for a logging object created by the
-# logging module. PLY will use this by default to create things
-# such as the parser.out file. If a user wants more detailed
-# information, they can create their own logging object and pass
-# it into PLY.
-
-class PlyLogger(object):
- def __init__(self,f):
- self.f = f
- def debug(self,msg,*args,**kwargs):
- self.f.write((msg % args) + "\n")
- info = debug
-
- def warning(self,msg,*args,**kwargs):
- self.f.write("WARNING: "+ (msg % args) + "\n")
-
- def error(self,msg,*args,**kwargs):
- self.f.write("ERROR: " + (msg % args) + "\n")
-
- critical = debug
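-
-# Usage sketch (illustrative; the filename is hypothetical): routing PLY
-# diagnostics to an open file instead of the default destination.
-#
-#     log = PlyLogger(open("parser_debug.txt", "w"))
-#     log.warning("unused token %s", "FOO")   # writes "WARNING: unused token FOO"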
-
-# Null logger is used when no output is generated. Does nothing.
-class NullLogger(object):
- def __getattribute__(self,name):
- return self
- def __call__(self,*args,**kwargs):
- return self
-
-# Exception raised for yacc-related errors
-class YaccError(Exception): pass
-
-# Format the result message that the parser produces when running in debug mode.
-def format_result(r):
- repr_str = repr(r)
- if '\n' in repr_str: repr_str = repr(repr_str)
- if len(repr_str) > resultlimit:
- repr_str = repr_str[:resultlimit]+" ..."
- result = "<%s @ 0x%x> (%s)" % (type(r).__name__,id(r),repr_str)
- return result
-
-
-# Format stack entries when the parser is running in debug mode
-def format_stack_entry(r):
- repr_str = repr(r)
- if '\n' in repr_str: repr_str = repr(repr_str)
- if len(repr_str) < 16:
- return repr_str
- else:
- return "<%s @ 0x%x>" % (type(r).__name__,id(r))
-
-#-----------------------------------------------------------------------------
-# === LR Parsing Engine ===
-#
-# The following classes are used for the LR parser itself. These are not
-# used during table construction and are independent of the actual LR
-# table generation algorithm
-#-----------------------------------------------------------------------------
-
-# This class is used to hold non-terminal grammar symbols during parsing.
-# It normally has the following attributes set:
-# .type = Grammar symbol type
-# .value = Symbol value
-# .lineno = Starting line number
-# .endlineno = Ending line number (optional, set automatically)
-# .lexpos = Starting lex position
-# .endlexpos = Ending lex position (optional, set automatically)
-
-class YaccSymbol:
- def __str__(self): return self.type
- def __repr__(self): return str(self)
-
-# This class is a wrapper around the objects actually passed to each
-# grammar rule. Index lookup and assignment actually assign the
-# .value attribute of the underlying YaccSymbol object.
-# The lineno() method returns the line number of a given
-# item (or 0 if not defined). The linespan() method returns
-# a tuple of (startline,endline) representing the range of lines
-# for a symbol. The lexspan() method returns a tuple (lexpos,endlexpos)
-# representing the range of positional information for a symbol.
-
-class YaccProduction:
- def __init__(self,s,stack=None):
- self.slice = s
- self.stack = stack
- self.lexer = None
- self.parser= None
- def __getitem__(self,n):
- if n >= 0: return self.slice[n].value
- else: return self.stack[n].value
-
- def __setitem__(self,n,v):
- self.slice[n].value = v
-
- def __getslice__(self,i,j):
- return [s.value for s in self.slice[i:j]]
-
- def __len__(self):
- return len(self.slice)
-
- def lineno(self,n):
- return getattr(self.slice[n],"lineno",0)
-
- def set_lineno(self,n,lineno):
- self.slice[n].lineno = lineno
-
- def linespan(self,n):
- startline = getattr(self.slice[n],"lineno",0)
- endline = getattr(self.slice[n],"endlineno",startline)
- return startline,endline
-
- def lexpos(self,n):
- return getattr(self.slice[n],"lexpos",0)
-
- def lexspan(self,n):
- startpos = getattr(self.slice[n],"lexpos",0)
- endpos = getattr(self.slice[n],"endlexpos",startpos)
- return startpos,endpos
-
- def error(self):
- raise SyntaxError
-
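-# Sketch of how a grammar rule sees a YaccProduction (illustrative; the rule
-# name p_expr_plus is hypothetical, following the standard PLY convention):
-#
-#     def p_expr_plus(p):
-#         'expr : expr PLUS term'
-#         # p[0] is the result slot; p[1] and p[3] are the operand values
-#         p[0] = p[1] + p[3]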
-
-# -----------------------------------------------------------------------------
-# == LRParser ==
-#
-# The LR Parsing engine.
-# -----------------------------------------------------------------------------
-
-class LRParser:
- def __init__(self,lrtab,errorf):
- self.productions = lrtab.lr_productions
- self.action = lrtab.lr_action
- self.goto = lrtab.lr_goto
- self.errorfunc = errorf
-
- def errok(self):
- self.errorok = 1
-
- def restart(self):
- del self.statestack[:]
- del self.symstack[:]
- sym = YaccSymbol()
- sym.type = '$end'
- self.symstack.append(sym)
- self.statestack.append(0)
-
- def parse(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
- if debug or yaccdevel:
- if isinstance(debug,int):
- debug = PlyLogger(sys.stderr)
- return self.parsedebug(input,lexer,debug,tracking,tokenfunc)
- elif tracking:
- return self.parseopt(input,lexer,debug,tracking,tokenfunc)
- else:
- return self.parseopt_notrack(input,lexer,debug,tracking,tokenfunc)
-
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parsedebug().
- #
- # This is the debugging enabled version of parse(). All changes made to the
- # parsing engine should be made here. For the non-debugging version,
- # copy this code to a method parseopt() and delete all of the sections
- # enclosed in:
- #
- # #--! DEBUG
- # statements
- # #--! DEBUG
- #
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- def parsedebug(self,input=None,lexer=None,debug=None,tracking=0,tokenfunc=None):
- lookahead = None # Current lookahead symbol
- lookaheadstack = [ ] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
- # --! DEBUG
- debug.info("PLY: PARSE DEBUG START")
- # --! DEBUG
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- lex = load_ply_lex()
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set up the state and symbol stacks
-
- statestack = [ ] # Stack of parsing states
- self.statestack = statestack
- symstack = [ ] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = "$end"
- symstack.append(sym)
- state = 0
- while 1:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
- # --! DEBUG
- debug.debug('')
- debug.debug('State : %s', state)
- # --! DEBUG
-
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = "$end"
-
- # --! DEBUG
- debug.debug('Stack : %s',
- ("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
- # --! DEBUG
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
- # --! DEBUG
- debug.debug("Action : Shift and goto state %s", t)
- # --! DEBUG
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount: errorcount -=1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
- # --! DEBUG
- if plen:
- debug.info("Action : Reduce rule [%s] with %s and goto state %d", p.str, "["+",".join([format_stack_entry(_v.value) for _v in symstack[-plen:]])+"]",-t)
- else:
- debug.info("Action : Reduce rule [%s] with %s and goto state %d", p.str, [],-t)
-
- # --! DEBUG
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
- # --! TRACKING
- if tracking:
- t1 = targ[1]
- sym.lineno = t1.lineno
- sym.lexpos = t1.lexpos
- t1 = targ[-1]
- sym.endlineno = getattr(t1,"endlineno",t1.lineno)
- sym.endlexpos = getattr(t1,"endlexpos",t1.lexpos)
-
- # --! TRACKING
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- del statestack[-plen:]
- p.callable(pslice)
- # --! DEBUG
- debug.info("Result : %s", format_result(pslice[0]))
- # --! DEBUG
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set, enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
- state = statestack[-1]
- sym.type = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = 0
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
- # --! TRACKING
- if tracking:
- sym.lineno = lexer.lineno
- sym.lexpos = lexer.lexpos
- # --! TRACKING
-
- targ = [ sym ]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- p.callable(pslice)
- # --! DEBUG
- debug.info("Result : %s", format_result(pslice[0]))
- # --! DEBUG
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set, enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
- state = statestack[-1]
- sym.type = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = 0
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- result = getattr(n,"value",None)
- # --! DEBUG
- debug.info("Done : Returning %s", format_result(result))
- debug.info("PLY: PARSE DEBUG END")
- # --! DEBUG
- return result
-
- if t == None:
-
- # --! DEBUG
- debug.error('Error : %s',
- ("%s . %s" % (" ".join([xx.type for xx in symstack][1:]), str(lookahead))).lstrip())
- # --! DEBUG
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
- # In addition to pushing the error token, we call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = 0
- errtoken = lookahead
- if errtoken.type == "$end":
- errtoken = None # End of file!
- if self.errorfunc:
- global errok,token,restart
- errok = self.errok # Set some special functions available in error recovery
- token = get_token
- restart = self.restart
- if errtoken and not hasattr(errtoken,'lexer'):
- errtoken.lexer = lexer
- tok = self.errorfunc(errtoken)
- del errok, token, restart # Delete special functions
-
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
- else: lineno = 0
- if lineno:
- sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
- else:
- sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
- else:
- sys.stderr.write("yacc: Parse error in input. EOF\n")
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != "$end":
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == "$end":
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- lookahead = None
- continue
- t = YaccSymbol()
- t.type = 'error'
- if hasattr(lookahead,"lineno"):
- t.lineno = lookahead.lineno
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- symstack.pop()
- statestack.pop()
- state = statestack[-1] # Potential bug fix
-
- continue
-
- # Call an error function here
- raise RuntimeError("yacc: internal parser error!!!\n")
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parseopt().
- #
- # Optimized version of parse() method. DO NOT EDIT THIS CODE DIRECTLY.
- # Edit the debug version above, then copy any modifications to the method
- # below while removing #--! DEBUG sections.
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
-
- def parseopt(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
- lookahead = None # Current lookahead symbol
- lookaheadstack = [ ] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- lex = load_ply_lex()
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set up the state and symbol stacks
-
- statestack = [ ] # Stack of parsing states
- self.statestack = statestack
- symstack = [ ] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = '$end'
- symstack.append(sym)
- state = 0
- while 1:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = '$end'
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount: errorcount -=1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
- # --! TRACKING
- if tracking:
- t1 = targ[1]
- sym.lineno = t1.lineno
- sym.lexpos = t1.lexpos
- t1 = targ[-1]
- sym.endlineno = getattr(t1,"endlineno",t1.lineno)
- sym.endlexpos = getattr(t1,"endlexpos",t1.lexpos)
-
- # --! TRACKING
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- del statestack[-plen:]
- p.callable(pslice)
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set, enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
- state = statestack[-1]
- sym.type = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = 0
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
- # --! TRACKING
- if tracking:
- sym.lineno = lexer.lineno
- sym.lexpos = lexer.lexpos
- # --! TRACKING
-
- targ = [ sym ]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- p.callable(pslice)
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set, enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
- state = statestack[-1]
- sym.type = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = 0
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- return getattr(n,"value",None)
-
- if t == None:
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
- # In addition to pushing the error token, we call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = 0
- errtoken = lookahead
- if errtoken.type == '$end':
- errtoken = None # End of file!
- if self.errorfunc:
- global errok,token,restart
- errok = self.errok # Set some special functions available in error recovery
- token = get_token
- restart = self.restart
- if errtoken and not hasattr(errtoken,'lexer'):
- errtoken.lexer = lexer
- tok = self.errorfunc(errtoken)
- del errok, token, restart # Delete special functions
-
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
- else: lineno = 0
- if lineno:
- sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
- else:
- sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
- else:
- sys.stderr.write("yacc: Parse error in input. EOF\n")
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != '$end':
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == '$end':
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- lookahead = None
- continue
- t = YaccSymbol()
- t.type = 'error'
- if hasattr(lookahead,"lineno"):
- t.lineno = lookahead.lineno
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- symstack.pop()
- statestack.pop()
- state = statestack[-1] # Potential bug fix
-
- continue
-
- # Call an error function here
- raise RuntimeError("yacc: internal parser error!!!\n")
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # parseopt_notrack().
- #
- # Optimized version of parseopt() with line number tracking removed.
- # DO NOT EDIT THIS CODE DIRECTLY. Copy the optimized version and remove
- # code in the #--! TRACKING sections
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- def parseopt_notrack(self,input=None,lexer=None,debug=0,tracking=0,tokenfunc=None):
- lookahead = None # Current lookahead symbol
- lookaheadstack = [ ] # Stack of lookahead symbols
- actions = self.action # Local reference to action table (to avoid lookup on self.)
- goto = self.goto # Local reference to goto table (to avoid lookup on self.)
- prod = self.productions # Local reference to production list (to avoid lookup on self.)
- pslice = YaccProduction(None) # Production object passed to grammar rules
- errorcount = 0 # Used during error recovery
-
- # If no lexer was given, we will try to use the lex module
- if not lexer:
- lex = load_ply_lex()
- lexer = lex.lexer
-
- # Set up the lexer and parser objects on pslice
- pslice.lexer = lexer
- pslice.parser = self
-
- # If input was supplied, pass to lexer
- if input is not None:
- lexer.input(input)
-
- if tokenfunc is None:
- # Tokenize function
- get_token = lexer.token
- else:
- get_token = tokenfunc
-
- # Set up the state and symbol stacks
-
- statestack = [ ] # Stack of parsing states
- self.statestack = statestack
- symstack = [ ] # Stack of grammar symbols
- self.symstack = symstack
-
- pslice.stack = symstack # Put in the production
- errtoken = None # Err token
-
- # The start state is assumed to be (0,$end)
-
- statestack.append(0)
- sym = YaccSymbol()
- sym.type = '$end'
- symstack.append(sym)
- state = 0
- while 1:
- # Get the next symbol on the input. If a lookahead symbol
- # is already set, we just use that. Otherwise, we'll pull
- # the next token off of the lookaheadstack or from the lexer
-
- if not lookahead:
- if not lookaheadstack:
- lookahead = get_token() # Get the next token
- else:
- lookahead = lookaheadstack.pop()
- if not lookahead:
- lookahead = YaccSymbol()
- lookahead.type = '$end'
-
- # Check the action table
- ltype = lookahead.type
- t = actions[state].get(ltype)
-
- if t is not None:
- if t > 0:
- # shift a symbol on the stack
- statestack.append(t)
- state = t
-
- symstack.append(lookahead)
- lookahead = None
-
- # Decrease error count on successful shift
- if errorcount: errorcount -=1
- continue
-
- if t < 0:
- # reduce a symbol on the stack, emit a production
- p = prod[-t]
- pname = p.name
- plen = p.len
-
- # Get production function
- sym = YaccSymbol()
- sym.type = pname # Production name
- sym.value = None
-
- if plen:
- targ = symstack[-plen-1:]
- targ[0] = sym
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # below as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- del symstack[-plen:]
- del statestack[-plen:]
- p.callable(pslice)
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set, enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
- state = statestack[-1]
- sym.type = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = 0
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- else:
-
- targ = [ sym ]
-
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
- # The code enclosed in this section is duplicated
- # above as a performance optimization. Make sure
- # changes get made in both locations.
-
- pslice.slice = targ
-
- try:
- # Call the grammar rule with our special slice object
- p.callable(pslice)
- symstack.append(sym)
- state = goto[statestack[-1]][pname]
- statestack.append(state)
- except SyntaxError:
- # If an error was set, enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
- state = statestack[-1]
- sym.type = 'error'
- lookahead = sym
- errorcount = error_count
- self.errorok = 0
- continue
- # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-
- if t == 0:
- n = symstack[-1]
- return getattr(n,"value",None)
-
- if t == None:
-
- # We have some kind of parsing error here. To handle
- # this, we are going to push the current token onto
- # the tokenstack and replace it with an 'error' token.
- # If there are any synchronization rules, they may
- # catch it.
- #
- # In addition to pushing the error token, we call
- # the user defined p_error() function if this is the
- # first syntax error. This function is only called if
- # errorcount == 0.
- if errorcount == 0 or self.errorok:
- errorcount = error_count
- self.errorok = 0
- errtoken = lookahead
- if errtoken.type == '$end':
- errtoken = None # End of file!
- if self.errorfunc:
- global errok,token,restart
- errok = self.errok # Set some special functions available in error recovery
- token = get_token
- restart = self.restart
- if errtoken and not hasattr(errtoken,'lexer'):
- errtoken.lexer = lexer
- tok = self.errorfunc(errtoken)
- del errok, token, restart # Delete special functions
-
- if self.errorok:
- # User must have done some kind of panic
- # mode recovery on their own. The
- # returned token is the next lookahead
- lookahead = tok
- errtoken = None
- continue
- else:
- if errtoken:
- if hasattr(errtoken,"lineno"): lineno = lookahead.lineno
- else: lineno = 0
- if lineno:
- sys.stderr.write("yacc: Syntax error at line %d, token=%s\n" % (lineno, errtoken.type))
- else:
- sys.stderr.write("yacc: Syntax error, token=%s" % errtoken.type)
- else:
- sys.stderr.write("yacc: Parse error in input. EOF\n")
- return
-
- else:
- errorcount = error_count
-
- # case 1: the statestack only has 1 entry on it. If we're in this state, the
- # entire parse has been rolled back and we're completely hosed. The token is
- # discarded and we just keep going.
-
- if len(statestack) <= 1 and lookahead.type != '$end':
- lookahead = None
- errtoken = None
- state = 0
- # Nuke the pushback stack
- del lookaheadstack[:]
- continue
-
- # case 2: the statestack has a couple of entries on it, but we're
- # at the end of the file. nuke the top entry and generate an error token
-
- # Start nuking entries on the stack
- if lookahead.type == '$end':
- # Whoa. We're really hosed here. Bail out
- return
-
- if lookahead.type != 'error':
- sym = symstack[-1]
- if sym.type == 'error':
- # Hmmm. Error is on top of stack, we'll just nuke input
- # symbol and continue
- lookahead = None
- continue
- t = YaccSymbol()
- t.type = 'error'
- if hasattr(lookahead,"lineno"):
- t.lineno = lookahead.lineno
- t.value = lookahead
- lookaheadstack.append(lookahead)
- lookahead = t
- else:
- symstack.pop()
- statestack.pop()
- state = statestack[-1] # Potential bug fix
-
- continue
-
- # Call an error function here
- raise RuntimeError("yacc: internal parser error!!!\n")
-
-# -----------------------------------------------------------------------------
-# === Grammar Representation ===
-#
-# The following functions, classes, and variables are used to represent and
-# manipulate the rules that make up a grammar.
-# -----------------------------------------------------------------------------
-
-import re
-
-# regex matching identifiers
-_is_identifier = re.compile(r'^[a-zA-Z0-9_-]+$')
-
-# -----------------------------------------------------------------------------
-# class Production:
-#
-# This class stores the raw information about a single production or grammar rule.
-# A grammar rule refers to a specification such as this:
-#
-# expr : expr PLUS term
-#
-# Here are the basic attributes defined on all productions
-#
-# name - Name of the production. For example 'expr'
-# prod - A list of symbols on the right side ['expr','PLUS','term']
-# prec - Production precedence level
-# number - Production number.
-# func - Function that executes on reduce
-# file - File where production function is defined
-# lineno - Line number where production function is defined
-#
-# The following derived attributes are also defined:
-#
-# len - Length of the production (number of symbols on right hand side)
-# usyms - Set of unique symbols found in the production
-# -----------------------------------------------------------------------------
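-
-# For instance (an illustrative sketch; p_expr is a hypothetical rule name),
-# the rule 'expr : expr PLUS term' would be stored roughly as:
-#
-#     Production(number=1, name='expr', prod=['expr','PLUS','term'],
-#                precedence=('right',0), func='p_expr')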
-
-class Production(object):
- reduced = 0
- def __init__(self,number,name,prod,precedence=('right',0),func=None,file='',line=0):
- self.name = name
- self.prod = tuple(prod)
- self.number = number
- self.func = func
- self.callable = None
- self.file = file
- self.line = line
- self.prec = precedence
-
- # Internal settings used during table construction
-
- self.len = len(self.prod) # Length of the production
-
- # Create a list of unique production symbols used in the production
- self.usyms = [ ]
- for s in self.prod:
- if s not in self.usyms:
- self.usyms.append(s)
-
- # List of all LR items for the production
- self.lr_items = []
- self.lr_next = None
-
- # Create a string representation
- if self.prod:
- self.str = "%s -> %s" % (self.name," ".join(self.prod))
- else:
- self.str = "%s -> <empty>" % self.name
-
- def __str__(self):
- return self.str
-
- def __repr__(self):
- return "Production("+str(self)+")"
-
- def __len__(self):
- return len(self.prod)
-
- def __nonzero__(self):
- return 1
-
- def __getitem__(self,index):
- return self.prod[index]
-
- # Return the nth lr_item from the production (or None if at the end)
- def lr_item(self,n):
- if n > len(self.prod): return None
- p = LRItem(self,n)
-
- # Precompute the list of productions immediately following. Hack. Remove later
- try:
- p.lr_after = Prodnames[p.prod[n+1]]
- except (IndexError,KeyError):
- p.lr_after = []
- try:
- p.lr_before = p.prod[n-1]
- except IndexError:
- p.lr_before = None
-
- return p
-
- # Bind the production function name to a callable
- def bind(self,pdict):
- if self.func:
- self.callable = pdict[self.func]
-
-# This class serves as a minimal stand-in for Production objects when
-# reading table data from files. It only contains information
-# actually used by the LR parsing engine, plus some additional
-# debugging information.
-class MiniProduction(object):
- def __init__(self,str,name,len,func,file,line):
- self.name = name
- self.len = len
- self.func = func
- self.callable = None
- self.file = file
- self.line = line
- self.str = str
- def __str__(self):
- return self.str
- def __repr__(self):
- return "MiniProduction(%s)" % self.str
-
- # Bind the production function name to a callable
- def bind(self,pdict):
- if self.func:
- self.callable = pdict[self.func]
-
-
-# -----------------------------------------------------------------------------
-# class LRItem
-#
-# This class represents a specific stage of parsing a production rule. For
-# example:
-#
-# expr : expr . PLUS term
-#
-# In the above, the "." represents the current location of the parse. Here
-# are its basic attributes:
-#
-# name - Name of the production. For example 'expr'
-# prod - A list of symbols on the right side ['expr','.', 'PLUS','term']
-# number - Production number.
-#
-# lr_next - Next LR item. For example, if we are at 'expr -> expr . PLUS term',
-# then lr_next refers to 'expr -> expr PLUS . term'
-# lr_index - LR item index (location of the ".") in the prod list.
-# lookaheads - LALR lookahead symbols for this item
-# len - Length of the production (number of symbols on right hand side)
-# lr_after - List of all productions that immediately follow
-# lr_before - Grammar symbol immediately before
-# -----------------------------------------------------------------------------
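-
-# For example (an illustrative sketch): LRItem(p, 1) for the production
-# 'expr : expr PLUS term' prints as 'expr -> expr . PLUS term', with
-# lr_index == 1 marking the position of the dot.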
-
-class LRItem(object):
- def __init__(self,p,n):
- self.name = p.name
- self.prod = list(p.prod)
- self.number = p.number
- self.lr_index = n
- self.lookaheads = { }
- self.prod.insert(n,".")
- self.prod = tuple(self.prod)
- self.len = len(self.prod)
- self.usyms = p.usyms
-
- def __str__(self):
- if self.prod:
- s = "%s -> %s" % (self.name," ".join(self.prod))
- else:
- s = "%s -> <empty>" % self.name
- return s
-
- def __repr__(self):
- return "LRItem("+str(self)+")"
-
-# -----------------------------------------------------------------------------
-# rightmost_terminal()
-#
-# Return the rightmost terminal from a list of symbols. Used in add_production()
-# -----------------------------------------------------------------------------
-def rightmost_terminal(symbols, terminals):
- i = len(symbols) - 1
- while i >= 0:
- if symbols[i] in terminals:
- return symbols[i]
- i -= 1
- return None
-
-# -----------------------------------------------------------------------------
-# === GRAMMAR CLASS ===
-#
-# The following class represents the contents of the specified grammar along
-# with various computed properties such as first sets, follow sets, LR items, etc.
-# This data is used for critical parts of the table generation process later.
-# -----------------------------------------------------------------------------
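-
-# Minimal usage sketch (illustrative; the token names are hypothetical):
-#
-#     g = Grammar(['PLUS', 'NUMBER'])
-#     g.add_production('expr', ['expr', 'PLUS', 'expr'])
-#     g.add_production('expr', ['NUMBER'])
-#     g.set_start('expr')      # production 0 becomes S' -> expr
-#     g.compute_first()        # FIRST sets for all symbols
-#     g.compute_follow()       # FOLLOW sets for all nonterminals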
-
-class GrammarError(YaccError): pass
-
-class Grammar(object):
- def __init__(self,terminals):
- self.Productions = [None] # A list of all of the productions. The first
- # entry is always reserved for the purpose of
- # building an augmented grammar
-
- self.Prodnames = { } # A dictionary mapping the names of nonterminals to a list of all
- # productions of that nonterminal.
-
- self.Prodmap = { } # A dictionary that is only used to detect duplicate
- # productions.
-
- self.Terminals = { } # A dictionary mapping the names of terminal symbols to a
- # list of the rules where they are used.
-
- for term in terminals:
- self.Terminals[term] = []
-
- self.Terminals['error'] = []
-
- self.Nonterminals = { } # A dictionary mapping names of nonterminals to a list
- # of rule numbers where they are used.
-
- self.First = { } # A dictionary of precomputed FIRST(x) symbols
-
- self.Follow = { } # A dictionary of precomputed FOLLOW(x) symbols
-
- self.Precedence = { } # Precedence rules for each terminal. Contains tuples of the
- # form ('right',level) or ('nonassoc', level) or ('left',level)
-
- self.UsedPrecedence = { } # Precedence rules that were actually used by the grammar.
- # This is only used to provide error checking and to generate
- # a warning about unused precedence rules.
-
- self.Start = None # Starting symbol for the grammar
-
-
- def __len__(self):
- return len(self.Productions)
-
- def __getitem__(self,index):
- return self.Productions[index]
-
- # -----------------------------------------------------------------------------
- # set_precedence()
- #
- # Sets the precedence for a given terminal. assoc is the associativity such as
- # 'left','right', or 'nonassoc'. level is a numeric level.
- #
- # -----------------------------------------------------------------------------
-
- def set_precedence(self,term,assoc,level):
- assert self.Productions == [None],"Must call set_precedence() before add_production()"
- if term in self.Precedence:
- raise GrammarError("Precedence already specified for terminal '%s'" % term)
- if assoc not in ['left','right','nonassoc']:
- raise GrammarError("Associativity must be one of 'left','right', or 'nonassoc'")
- self.Precedence[term] = (assoc,level)
-
- # -----------------------------------------------------------------------------
- # add_production()
- #
- # Given an action function, this function assembles a production rule and
- # computes its precedence level.
- #
- # The production rule is supplied as a list of symbols. For example,
- # a rule such as 'expr : expr PLUS term' has a production name of 'expr' and
- # symbols ['expr','PLUS','term'].
- #
- # Precedence is determined by the precedence of the rightmost terminal
- # or the precedence of a terminal specified by %prec.
- #
- # A variety of error checks are performed to make sure production symbols
- # are valid and that %prec is used correctly.
- # -----------------------------------------------------------------------------
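-
- # A typical use of %prec (an illustrative sketch): giving unary minus the
- # precedence of a dedicated UMINUS token declared in the precedence table:
- #
- #     def p_expr_uminus(p):
- #         'expr : MINUS expr %prec UMINUS'
- #         p[0] = -p[2]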
-
- def add_production(self,prodname,syms,func=None,file='',line=0):
-
- if prodname in self.Terminals:
- raise GrammarError("%s:%d: Illegal rule name '%s'. Already defined as a token" % (file,line,prodname))
- if prodname == 'error':
- raise GrammarError("%s:%d: Illegal rule name '%s'. error is a reserved word" % (file,line,prodname))
- if not _is_identifier.match(prodname):
- raise GrammarError("%s:%d: Illegal rule name '%s'" % (file,line,prodname))
-
- # Look for literal tokens
- for n,s in enumerate(syms):
- if s[0] in "'\"":
- try:
- c = eval(s)
- if (len(c) > 1):
- raise GrammarError("%s:%d: Literal token %s in rule '%s' may only be a single character" % (file,line,s, prodname))
- if not c in self.Terminals:
- self.Terminals[c] = []
- syms[n] = c
- continue
- except SyntaxError:
- pass
- if not _is_identifier.match(s) and s != '%prec':
- raise GrammarError("%s:%d: Illegal name '%s' in rule '%s'" % (file,line,s, prodname))
-
- # Determine the precedence level
- if '%prec' in syms:
- if syms[-1] == '%prec':
- raise GrammarError("%s:%d: Syntax error. Nothing follows %%prec" % (file,line))
- if syms[-2] != '%prec':
- raise GrammarError("%s:%d: Syntax error. %%prec can only appear at the end of a grammar rule" % (file,line))
- precname = syms[-1]
- prodprec = self.Precedence.get(precname,None)
- if not prodprec:
- raise GrammarError("%s:%d: Nothing known about the precedence of '%s'" % (file,line,precname))
- else:
- self.UsedPrecedence[precname] = 1
- del syms[-2:] # Drop %prec from the rule
- else:
- # If no %prec, precedence is determined by the rightmost terminal symbol
- precname = rightmost_terminal(syms,self.Terminals)
- prodprec = self.Precedence.get(precname,('right',0))
-
- # See if the rule is already in the rulemap
- map = "%s -> %s" % (prodname,syms)
- if map in self.Prodmap:
- m = self.Prodmap[map]
- raise GrammarError("%s:%d: Duplicate rule %s. " % (file,line, m) +
- "Previous definition at %s:%d" % (m.file, m.line))
-
- # From this point on, everything is valid. Create a new Production instance
- pnumber = len(self.Productions)
- if not prodname in self.Nonterminals:
- self.Nonterminals[prodname] = [ ]
-
- # Add the production number to Terminals and Nonterminals
- for t in syms:
- if t in self.Terminals:
- self.Terminals[t].append(pnumber)
- else:
- if not t in self.Nonterminals:
- self.Nonterminals[t] = [ ]
- self.Nonterminals[t].append(pnumber)
-
- # Create a production and add it to the list of productions
- p = Production(pnumber,prodname,syms,prodprec,func,file,line)
- self.Productions.append(p)
- self.Prodmap[map] = p
-
- # Add to the global productions list
- try:
- self.Prodnames[prodname].append(p)
- except KeyError:
- self.Prodnames[prodname] = [ p ]
- return 0
-
- # -----------------------------------------------------------------------------
- # set_start()
- #
- # Sets the starting symbol and creates the augmented grammar. Production
- # rule 0 is S' -> start where start is the start symbol.
- # -----------------------------------------------------------------------------
-
- def set_start(self,start=None):
- if not start:
- start = self.Productions[1].name
- if start not in self.Nonterminals:
- raise GrammarError("start symbol %s undefined" % start)
- self.Productions[0] = Production(0,"S'",[start])
- self.Nonterminals[start].append(0)
- self.Start = start
-
- # -----------------------------------------------------------------------------
- # find_unreachable()
- #
- # Find all of the nonterminal symbols that can't be reached from the starting
- # symbol. Returns a list of nonterminals that can't be reached.
- # -----------------------------------------------------------------------------
-
- def find_unreachable(self):
-
- # Mark all symbols that are reachable from a symbol s
- def mark_reachable_from(s):
- if reachable[s]:
- # We've already reached symbol s.
- return
- reachable[s] = 1
- for p in self.Prodnames.get(s,[]):
- for r in p.prod:
- mark_reachable_from(r)
-
- reachable = { }
- for s in list(self.Terminals) + list(self.Nonterminals):
- reachable[s] = 0
-
- mark_reachable_from( self.Productions[0].prod[0] )
-
- return [s for s in list(self.Nonterminals)
- if not reachable[s]]
-
- # -----------------------------------------------------------------------------
- # infinite_cycles()
- #
- # This function looks at the various parsing rules and tries to detect
- # infinite recursion cycles (grammar rules where there is no possible way
- # to derive a string of only terminals).
- # -----------------------------------------------------------------------------
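- # For example (an illustrative sketch): given only the rule 'A : A PLUS A',
- # the nonterminal A can never derive a string of terminals alone, so
- # infinite_cycles() reports 'A'.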
-
- def infinite_cycles(self):
- terminates = {}
-
- # Terminals:
- for t in self.Terminals:
- terminates[t] = 1
-
- terminates['$end'] = 1
-
- # Nonterminals:
-
- # Initialize to false:
- for n in self.Nonterminals:
- terminates[n] = 0
-
- # Then propagate termination until no change:
- while 1:
- some_change = 0
- for (n,pl) in self.Prodnames.items():
- # Nonterminal n terminates iff any of its productions terminates.
- for p in pl:
- # Production p terminates iff all of its rhs symbols terminate.
- for s in p.prod:
- if not terminates[s]:
- # The symbol s does not terminate,
- # so production p does not terminate.
- p_terminates = 0
- break
- else:
- # didn't break from the loop,
- # so every symbol s terminates
- # so production p terminates.
- p_terminates = 1
-
- if p_terminates:
- # symbol n terminates!
- if not terminates[n]:
- terminates[n] = 1
- some_change = 1
- # Don't need to consider any more productions for this n.
- break
-
- if not some_change:
- break
-
- infinite = []
- for (s,term) in terminates.items():
- if not term:
- if not s in self.Prodnames and not s in self.Terminals and s != 'error':
- # s is used-but-not-defined, and we've already warned of that,
- # so it would be overkill to say that it's also non-terminating.
- pass
- else:
- infinite.append(s)
-
- return infinite
-
-
- # -----------------------------------------------------------------------------
- # undefined_symbols()
- #
- # Find all symbols that were used in the grammar, but not defined as tokens or
- # grammar rules. Returns a list of tuples (sym, prod) where sym is the symbol
- # and prod is the production where the symbol was used.
- # -----------------------------------------------------------------------------
- def undefined_symbols(self):
- result = []
- for p in self.Productions:
- if not p: continue
-
- for s in p.prod:
- if not s in self.Prodnames and not s in self.Terminals and s != 'error':
- result.append((s,p))
- return result
-
- # -----------------------------------------------------------------------------
- # unused_terminals()
- #
- # Find all terminals that were defined, but not used by the grammar. Returns
- # a list of all symbols.
- # -----------------------------------------------------------------------------
- def unused_terminals(self):
- unused_tok = []
- for s,v in self.Terminals.items():
- if s != 'error' and not v:
- unused_tok.append(s)
-
- return unused_tok
-
- # ------------------------------------------------------------------------------
- # unused_rules()
- #
- # Find all grammar rules that were defined but not used (possibly unreachable).
- # Returns a list of productions.
- # ------------------------------------------------------------------------------
-
- def unused_rules(self):
- unused_prod = []
- for s,v in self.Nonterminals.items():
- if not v:
- p = self.Prodnames[s][0]
- unused_prod.append(p)
- return unused_prod
-
- # -----------------------------------------------------------------------------
- # unused_precedence()
- #
- # Returns a list of tuples (term,precedence) corresponding to precedence
- # rules that were never used by the grammar. term is the name of the terminal
- # on which precedence was applied and precedence is a string such as 'left' or
- # 'right' corresponding to the type of precedence.
- # -----------------------------------------------------------------------------
-
- def unused_precedence(self):
- unused = []
- for termname in self.Precedence:
- if not (termname in self.Terminals or termname in self.UsedPrecedence):
- unused.append((termname,self.Precedence[termname][0]))
-
- return unused
-
- # -------------------------------------------------------------------------
- # _first()
- #
- # Compute the value of FIRST1(beta) where beta is a tuple of symbols.
- #
- # During execution of compute_first(), the result may be incomplete.
- # Afterward (e.g., when called from compute_follow()), it will be complete.
- # -------------------------------------------------------------------------
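- # Worked example (an illustrative sketch): with productions A -> a | <empty>
- # and B -> b, _first(('A','B')) yields ['a', 'b']; because A can derive
- # <empty>, FIRST(B) is included, but '<empty>' itself is not, since B
- # cannot derive <empty>.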
- def _first(self,beta):
-
- # We are computing First(x1,x2,x3,...,xn)
- result = [ ]
- for x in beta:
- x_produces_empty = 0
-
- # Add all the non-<empty> symbols of First[x] to the result.
- for f in self.First[x]:
- if f == '<empty>':
- x_produces_empty = 1
- else:
- if f not in result: result.append(f)
-
- if x_produces_empty:
- # We have to consider the next x in beta,
- # i.e. stay in the loop.
- pass
- else:
- # We don't have to consider any further symbols in beta.
- break
- else:
- # There was no 'break' from the loop,
- # so x_produces_empty was true for all x in beta,
- # so beta produces empty as well.
- result.append('<empty>')
-
- return result
-
- # -------------------------------------------------------------------------
- # compute_first()
- #
- # Compute the value of FIRST1(X) for all symbols
- # -------------------------------------------------------------------------
- def compute_first(self):
- if self.First:
- return self.First
-
- # Terminals:
- for t in self.Terminals:
- self.First[t] = [t]
-
- self.First['$end'] = ['$end']
-
- # Nonterminals:
-
- # Initialize to the empty set:
- for n in self.Nonterminals:
- self.First[n] = []
-
- # Then propagate symbols until no change:
- while 1:
- some_change = 0
- for n in self.Nonterminals:
- for p in self.Prodnames[n]:
- for f in self._first(p.prod):
- if f not in self.First[n]:
- self.First[n].append( f )
- some_change = 1
- if not some_change:
- break
-
- return self.First
-
- # ---------------------------------------------------------------------
- # compute_follow()
- #
- # Computes all of the follow sets for every non-terminal symbol. The
- # follow set is the set of all symbols that might follow a given
- # non-terminal. See the Dragon book, 2nd Ed. p. 189.
- # ---------------------------------------------------------------------
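- # Worked example (an illustrative sketch): for productions S -> A b and
- # S -> a A, FOLLOW(A) gains 'b' from the first rule and inherits FOLLOW(S)
- # (including '$end') from the second, where A ends the right-hand side.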
- def compute_follow(self,start=None):
- # If already computed, return the result
- if self.Follow:
- return self.Follow
-
- # If first sets not computed yet, do that first.
- if not self.First:
- self.compute_first()
-
- # Initialize all follow sets to empty, then add '$end' to the follow
- # set of the start symbol
- for k in self.Nonterminals:
- self.Follow[k] = [ ]
-
- if not start:
- start = self.Productions[1].name
-
- self.Follow[start] = [ '$end' ]
-
- while 1:
- didadd = 0
- for p in self.Productions[1:]:
- # Here is the production set
- for i in range(len(p.prod)):
- B = p.prod[i]
- if B in self.Nonterminals:
- # Okay. We got a non-terminal in a production
- fst = self._first(p.prod[i+1:])
- hasempty = 0
- for f in fst:
- if f != '<empty>' and f not in self.Follow[B]:
- self.Follow[B].append(f)
- didadd = 1
- if f == '<empty>':
- hasempty = 1
- if hasempty or i == (len(p.prod)-1):
- # Add elements of follow(a) to follow(b)
- for f in self.Follow[p.name]:
- if f not in self.Follow[B]:
- self.Follow[B].append(f)
- didadd = 1
- if not didadd: break
- return self.Follow
-
-
- # -----------------------------------------------------------------------------
- # build_lritems()
- #
- # This function walks the list of productions and builds a complete set of the
- # LR items. The LR items are stored in two ways: First, they are uniquely
- # numbered and placed in the list _lritems. Second, a linked list of LR items
- # is built for each production. For example:
- #
- # E -> E PLUS E
- #
- # Creates the list
- #
- # [E -> . E PLUS E, E -> E . PLUS E, E -> E PLUS . E, E -> E PLUS E . ]
- # -----------------------------------------------------------------------------
-
- def build_lritems(self):
- for p in self.Productions:
- lastlri = p
- i = 0
- lr_items = []
- while 1:
- if i > len(p):
- lri = None
- else:
- lri = LRItem(p,i)
- # Precompute the list of productions immediately following
- try:
- lri.lr_after = self.Prodnames[lri.prod[i+1]]
- except (IndexError,KeyError):
- lri.lr_after = []
- try:
- lri.lr_before = lri.prod[i-1]
- except IndexError:
- lri.lr_before = None
-
- lastlri.lr_next = lri
- if not lri: break
- lr_items.append(lri)
- lastlri = lri
- i += 1
- p.lr_items = lr_items
-
-# -----------------------------------------------------------------------------
-# == Class LRTable ==
-#
-# This class represents a basic table of LR parsing information.
-# Methods for generating the tables are not defined here. They are defined
-# in the derived class LRGeneratedTable.
-# -----------------------------------------------------------------------------
-
-class VersionError(YaccError): pass
-
-class LRTable(object):
- def __init__(self):
- self.lr_action = None
- self.lr_goto = None
- self.lr_productions = None
- self.lr_method = None
-
- def read_table(self,module):
- if isinstance(module,types.ModuleType):
- parsetab = module
- else:
- if sys.version_info[0] < 3:
- exec("import %s as parsetab" % module)
- else:
- env = { }
- exec("import %s as parsetab" % module, env, env)
- parsetab = env['parsetab']
-
- if parsetab._tabversion != __tabversion__:
- raise VersionError("yacc table file version is out of date")
-
- self.lr_action = parsetab._lr_action
- self.lr_goto = parsetab._lr_goto
-
- self.lr_productions = []
- for p in parsetab._lr_productions:
- self.lr_productions.append(MiniProduction(*p))
-
- self.lr_method = parsetab._lr_method
- return parsetab._lr_signature
-
- def read_pickle(self,filename):
- try:
- import cPickle as pickle
- except ImportError:
- import pickle
-
- in_f = open(filename,"rb")
-
- tabversion = pickle.load(in_f)
- if tabversion != __tabversion__:
- raise VersionError("yacc table file version is out of date")
- self.lr_method = pickle.load(in_f)
- signature = pickle.load(in_f)
- self.lr_action = pickle.load(in_f)
- self.lr_goto = pickle.load(in_f)
- productions = pickle.load(in_f)
-
- self.lr_productions = []
- for p in productions:
- self.lr_productions.append(MiniProduction(*p))
-
- in_f.close()
- return signature
-
- # Bind all production function names to callable objects in pdict
- def bind_callables(self,pdict):
- for p in self.lr_productions:
- p.bind(pdict)
-
-# -----------------------------------------------------------------------------
-# === LR Generator ===
-#
-# The following classes and functions are used to generate LR parsing tables on
-# a grammar.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# digraph()
-# traverse()
-#
-# The following two functions are used to compute set-valued functions
-# of the form:
-#
-# F(x) = F'(x) U U{F(y) | x R y}
-#
-# This is used to compute the values of Read() sets as well as FOLLOW sets
-# in LALR(1) generation.
-#
-# Inputs: X - An input set
-# R - A relation
-# FP - Set-valued function
-# ------------------------------------------------------------------------------
-
-def digraph(X,R,FP):
- N = { }
- for x in X:
- N[x] = 0
- stack = []
- F = { }
- for x in X:
- if N[x] == 0: traverse(x,N,stack,F,X,R,FP)
- return F
-
-def traverse(x,N,stack,F,X,R,FP):
- stack.append(x)
- d = len(stack)
- N[x] = d
- F[x] = FP(x) # F(X) <- F'(x)
-
- rel = R(x) # Get y's related to x
- for y in rel:
- if N[y] == 0:
- traverse(y,N,stack,F,X,R,FP)
- N[x] = min(N[x],N[y])
- for a in F.get(y,[]):
- if a not in F[x]: F[x].append(a)
- if N[x] == d:
- N[stack[-1]] = MAXINT
- F[stack[-1]] = F[x]
- element = stack.pop()
- while element != x:
- N[stack[-1]] = MAXINT
- F[stack[-1]] = F[x]
- element = stack.pop()
-
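-# A worked sketch of digraph() (the toy inputs below are illustrative, not
-# PLY data). It computes the least solution of
-# F(x) = FP(x) U union{ F(y) | x R y }:
-#
-#     X  = ['a', 'b', 'c']
-#     R  = lambda x: {'a': ['b'], 'b': ['c'], 'c': []}[x]
-#     FP = lambda x: [x.upper()]
-#     digraph(X, R, FP)
-#     # -> {'a': ['A', 'B', 'C'], 'b': ['B', 'C'], 'c': ['C']}
-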
-class LALRError(YaccError): pass
-
-# -----------------------------------------------------------------------------
-# == LRGeneratedTable ==
-#
-# This class implements the LR table generation algorithm. There are no
-# public methods except for write()
-# -----------------------------------------------------------------------------
-
-class LRGeneratedTable(LRTable):
- def __init__(self,grammar,method='LALR',log=None):
- if method not in ['SLR','LALR']:
- raise LALRError("Unsupported method %s" % method)
-
- self.grammar = grammar
- self.lr_method = method
-
- # Set up the logger
- if not log:
- log = NullLogger()
- self.log = log
-
- # Internal attributes
- self.lr_action = {} # Action table
- self.lr_goto = {} # Goto table
- self.lr_productions = grammar.Productions # Copy of grammar Production array
- self.lr_goto_cache = {} # Cache of computed gotos
- self.lr0_cidhash = {} # Cache of closures
-
- self._add_count = 0 # Internal counter used to detect cycles
-
-        # Diagnostic information filled in by the table generator
- self.sr_conflict = 0
- self.rr_conflict = 0
- self.conflicts = [] # List of conflicts
-
- self.sr_conflicts = []
- self.rr_conflicts = []
-
- # Build the tables
- self.grammar.build_lritems()
- self.grammar.compute_first()
- self.grammar.compute_follow()
- self.lr_parse_table()
-
- # Compute the LR(0) closure operation on I, where I is a set of LR(0) items.
-
- def lr0_closure(self,I):
- self._add_count += 1
-
- # Add everything in I to J
- J = I[:]
- didadd = 1
- while didadd:
- didadd = 0
- for j in J:
- for x in j.lr_after:
- if getattr(x,"lr0_added",0) == self._add_count: continue
- # Add B --> .G to J
- J.append(x.lr_next)
- x.lr0_added = self._add_count
- didadd = 1
-
- return J
-
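-    # For example (an illustrative sketch, not actual PLY output): with the
-    # grammar S' -> E and E -> E PLUS E | NUMBER, lr0_closure([S' -> . E])
-    # also pulls in E -> . E PLUS E and E -> . NUMBER, because the dot sits
-    # immediately before the non-terminal E.
-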
- # Compute the LR(0) goto function goto(I,X) where I is a set
- # of LR(0) items and X is a grammar symbol. This function is written
- # in a way that guarantees uniqueness of the generated goto sets
- # (i.e. the same goto set will never be returned as two different Python
- # objects). With uniqueness, we can later do fast set comparisons using
- # id(obj) instead of element-wise comparison.
-
- def lr0_goto(self,I,x):
- # First we look for a previously cached entry
- g = self.lr_goto_cache.get((id(I),x),None)
- if g: return g
-
- # Now we generate the goto set in a way that guarantees uniqueness
- # of the result
-
- s = self.lr_goto_cache.get(x,None)
- if not s:
- s = { }
- self.lr_goto_cache[x] = s
-
- gs = [ ]
- for p in I:
- n = p.lr_next
- if n and n.lr_before == x:
- s1 = s.get(id(n),None)
- if not s1:
- s1 = { }
- s[id(n)] = s1
- gs.append(n)
- s = s1
- g = s.get('$end',None)
- if not g:
- if gs:
- g = self.lr0_closure(gs)
- s['$end'] = g
- else:
- s['$end'] = gs
- self.lr_goto_cache[(id(I),x)] = g
- return g
-
- # Compute the LR(0) sets of item function
- def lr0_items(self):
-
- C = [ self.lr0_closure([self.grammar.Productions[0].lr_next]) ]
- i = 0
- for I in C:
- self.lr0_cidhash[id(I)] = i
- i += 1
-
-        # Loop over the items in C and each grammar symbol
- i = 0
- while i < len(C):
- I = C[i]
- i += 1
-
- # Collect all of the symbols that could possibly be in the goto(I,X) sets
- asyms = { }
- for ii in I:
- for s in ii.usyms:
- asyms[s] = None
-
- for x in asyms:
- g = self.lr0_goto(I,x)
- if not g: continue
- if id(g) in self.lr0_cidhash: continue
- self.lr0_cidhash[id(g)] = len(C)
- C.append(g)
-
- return C
-
- # -----------------------------------------------------------------------------
- # ==== LALR(1) Parsing ====
- #
- # LALR(1) parsing is almost exactly the same as SLR except that instead of
- # relying upon Follow() sets when performing reductions, a more selective
- # lookahead set that incorporates the state of the LR(0) machine is utilized.
- # Thus, we mainly just have to focus on calculating the lookahead sets.
- #
-    # The method used here is due to DeRemer and Pennello (1982).
- #
-    # DeRemer, F. L., and T. J. Pennello: "Efficient Computation of LALR(1)
- # Lookahead Sets", ACM Transactions on Programming Languages and Systems,
- # Vol. 4, No. 4, Oct. 1982, pp. 615-649
- #
- # Further details can also be found in:
- #
- # J. Tremblay and P. Sorenson, "The Theory and Practice of Compiler Writing",
- # McGraw-Hill Book Company, (1985).
- #
- # -----------------------------------------------------------------------------
-
- # -----------------------------------------------------------------------------
- # compute_nullable_nonterminals()
- #
- # Creates a dictionary containing all of the non-terminals that might produce
- # an empty production.
- # -----------------------------------------------------------------------------
-
- def compute_nullable_nonterminals(self):
- nullable = {}
- num_nullable = 0
- while 1:
- for p in self.grammar.Productions[1:]:
- if p.len == 0:
- nullable[p.name] = 1
- continue
- for t in p.prod:
- if not t in nullable: break
- else:
- nullable[p.name] = 1
- if len(nullable) == num_nullable: break
- num_nullable = len(nullable)
- return nullable
-
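-    # For example (illustrative): given the productions A -> <empty> and
-    # B -> A A, the first pass marks A as nullable and a later pass marks B,
-    # since every symbol in B's production is then known to be nullable.
-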
- # -----------------------------------------------------------------------------
- # find_nonterminal_trans(C)
- #
-    # Given a set of LR(0) items, this function finds all of the non-terminal
- # transitions. These are transitions in which a dot appears immediately before
- # a non-terminal. Returns a list of tuples of the form (state,N) where state
- # is the state number and N is the nonterminal symbol.
- #
- # The input C is the set of LR(0) items.
- # -----------------------------------------------------------------------------
-
- def find_nonterminal_transitions(self,C):
- trans = []
- for state in range(len(C)):
- for p in C[state]:
- if p.lr_index < p.len - 1:
- t = (state,p.prod[p.lr_index+1])
- if t[1] in self.grammar.Nonterminals:
- if t not in trans: trans.append(t)
- return trans
-
- # -----------------------------------------------------------------------------
- # dr_relation()
- #
- # Computes the DR(p,A) relationships for non-terminal transitions. The input
- # is a tuple (state,N) where state is a number and N is a nonterminal symbol.
- #
- # Returns a list of terminals.
- # -----------------------------------------------------------------------------
-
- def dr_relation(self,C,trans,nullable):
- dr_set = { }
- state,N = trans
- terms = []
-
- g = self.lr0_goto(C[state],N)
- for p in g:
- if p.lr_index < p.len - 1:
- a = p.prod[p.lr_index+1]
- if a in self.grammar.Terminals:
- if a not in terms: terms.append(a)
-
- # This extra bit is to handle the start state
- if state == 0 and N == self.grammar.Productions[0].prod[0]:
- terms.append('$end')
-
- return terms
-
- # -----------------------------------------------------------------------------
- # reads_relation()
- #
- # Computes the READS() relation (p,A) READS (t,C).
- # -----------------------------------------------------------------------------
-
- def reads_relation(self,C, trans, empty):
- # Look for empty transitions
- rel = []
- state, N = trans
-
- g = self.lr0_goto(C[state],N)
- j = self.lr0_cidhash.get(id(g),-1)
- for p in g:
- if p.lr_index < p.len - 1:
- a = p.prod[p.lr_index + 1]
- if a in empty:
- rel.append((j,a))
-
- return rel
-
- # -----------------------------------------------------------------------------
- # compute_lookback_includes()
- #
- # Determines the lookback and includes relations
- #
- # LOOKBACK:
- #
- # This relation is determined by running the LR(0) state machine forward.
- # For example, starting with a production "N : . A B C", we run it forward
- # to obtain "N : A B C ." We then build a relationship between this final
- # state and the starting state. These relationships are stored in a dictionary
- # lookdict.
- #
- # INCLUDES:
- #
- # Computes the INCLUDE() relation (p,A) INCLUDES (p',B).
- #
- # This relation is used to determine non-terminal transitions that occur
- # inside of other non-terminal transition states. (p,A) INCLUDES (p', B)
- # if the following holds:
- #
- # B -> LAT, where T -> epsilon and p' -L-> p
- #
- # L is essentially a prefix (which may be empty), T is a suffix that must be
- # able to derive an empty string. State p' must lead to state p with the string L.
- #
- # -----------------------------------------------------------------------------
-
- def compute_lookback_includes(self,C,trans,nullable):
-
- lookdict = {} # Dictionary of lookback relations
- includedict = {} # Dictionary of include relations
-
- # Make a dictionary of non-terminal transitions
- dtrans = {}
- for t in trans:
- dtrans[t] = 1
-
- # Loop over all transitions and compute lookbacks and includes
- for state,N in trans:
- lookb = []
- includes = []
- for p in C[state]:
- if p.name != N: continue
-
- # Okay, we have a name match. We now follow the production all the way
- # through the state machine until we get the . on the right hand side
-
- lr_index = p.lr_index
- j = state
- while lr_index < p.len - 1:
- lr_index = lr_index + 1
- t = p.prod[lr_index]
-
- # Check to see if this symbol and state are a non-terminal transition
- if (j,t) in dtrans:
-                        # Yes. Okay, there is some chance that this is an includes relation;
-                        # the only way to know for certain is whether the rest of the
-                        # production derives empty
-
- li = lr_index + 1
- while li < p.len:
-                        if p.prod[li] in self.grammar.Terminals: break # No, forget it
- if not p.prod[li] in nullable: break
- li = li + 1
- else:
- # Appears to be a relation between (j,t) and (state,N)
- includes.append((j,t))
-
- g = self.lr0_goto(C[j],t) # Go to next set
- j = self.lr0_cidhash.get(id(g),-1) # Go to next state
-
-                # When we get here, j is the final state; now we have to locate the production
- for r in C[j]:
- if r.name != p.name: continue
- if r.len != p.len: continue
- i = 0
-                # This loop is comparing a production ". A B C" with "A B C ."
- while i < r.lr_index:
- if r.prod[i] != p.prod[i+1]: break
- i = i + 1
- else:
- lookb.append((j,r))
- for i in includes:
- if not i in includedict: includedict[i] = []
- includedict[i].append((state,N))
- lookdict[(state,N)] = lookb
-
- return lookdict,includedict
-
- # -----------------------------------------------------------------------------
- # compute_read_sets()
- #
- # Given a set of LR(0) items, this function computes the read sets.
- #
- # Inputs: C = Set of LR(0) items
- # ntrans = Set of nonterminal transitions
- # nullable = Set of empty transitions
- #
- # Returns a set containing the read sets
- # -----------------------------------------------------------------------------
-
- def compute_read_sets(self,C, ntrans, nullable):
- FP = lambda x: self.dr_relation(C,x,nullable)
- R = lambda x: self.reads_relation(C,x,nullable)
- F = digraph(ntrans,R,FP)
- return F
-
- # -----------------------------------------------------------------------------
- # compute_follow_sets()
- #
- # Given a set of LR(0) items, a set of non-terminal transitions, a readset,
- # and an include set, this function computes the follow sets
- #
- # Follow(p,A) = Read(p,A) U U {Follow(p',B) | (p,A) INCLUDES (p',B)}
- #
- # Inputs:
- # ntrans = Set of nonterminal transitions
- # readsets = Readset (previously computed)
- # inclsets = Include sets (previously computed)
- #
- # Returns a set containing the follow sets
- # -----------------------------------------------------------------------------
-
- def compute_follow_sets(self,ntrans,readsets,inclsets):
- FP = lambda x: readsets[x]
- R = lambda x: inclsets.get(x,[])
- F = digraph(ntrans,R,FP)
- return F
-
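-    # For example (illustrative): if Read(p,A) = ['PLUS'] and (p,A) INCLUDES
-    # (p',B) where Follow(p',B) = ['$end'], the digraph() computation yields
-    # Follow(p,A) = ['PLUS', '$end'].
-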
- # -----------------------------------------------------------------------------
- # add_lookaheads()
- #
- # Attaches the lookahead symbols to grammar rules.
- #
- # Inputs: lookbacks - Set of lookback relations
- # followset - Computed follow set
- #
- # This function directly attaches the lookaheads to productions contained
- # in the lookbacks set
- # -----------------------------------------------------------------------------
-
- def add_lookaheads(self,lookbacks,followset):
- for trans,lb in lookbacks.items():
- # Loop over productions in lookback
- for state,p in lb:
- if not state in p.lookaheads:
- p.lookaheads[state] = []
- f = followset.get(trans,[])
- for a in f:
- if a not in p.lookaheads[state]: p.lookaheads[state].append(a)
-
- # -----------------------------------------------------------------------------
- # add_lalr_lookaheads()
- #
- # This function does all of the work of adding lookahead information for use
- # with LALR parsing
- # -----------------------------------------------------------------------------
-
- def add_lalr_lookaheads(self,C):
- # Determine all of the nullable nonterminals
- nullable = self.compute_nullable_nonterminals()
-
- # Find all non-terminal transitions
- trans = self.find_nonterminal_transitions(C)
-
- # Compute read sets
- readsets = self.compute_read_sets(C,trans,nullable)
-
- # Compute lookback/includes relations
- lookd, included = self.compute_lookback_includes(C,trans,nullable)
-
- # Compute LALR FOLLOW sets
- followsets = self.compute_follow_sets(trans,readsets,included)
-
- # Add all of the lookaheads
- self.add_lookaheads(lookd,followsets)
-
- # -----------------------------------------------------------------------------
- # lr_parse_table()
- #
- # This function constructs the parse tables for SLR or LALR
- # -----------------------------------------------------------------------------
- def lr_parse_table(self):
- Productions = self.grammar.Productions
- Precedence = self.grammar.Precedence
- goto = self.lr_goto # Goto array
- action = self.lr_action # Action array
- log = self.log # Logger for output
-
- actionp = { } # Action production array (temporary)
-
- log.info("Parsing method: %s", self.lr_method)
-
- # Step 1: Construct C = { I0, I1, ... IN}, collection of LR(0) items
- # This determines the number of states
-
- C = self.lr0_items()
-
- if self.lr_method == 'LALR':
- self.add_lalr_lookaheads(C)
-
- # Build the parser table, state by state
- st = 0
- for I in C:
- # Loop over each production in I
- actlist = [ ] # List of actions
- st_action = { }
- st_actionp = { }
- st_goto = { }
- log.info("")
- log.info("state %d", st)
- log.info("")
- for p in I:
- log.info(" (%d) %s", p.number, str(p))
- log.info("")
-
- for p in I:
- if p.len == p.lr_index + 1:
- if p.name == "S'":
- # Start symbol. Accept!
- st_action["$end"] = 0
- st_actionp["$end"] = p
- else:
- # We are at the end of a production. Reduce!
- if self.lr_method == 'LALR':
- laheads = p.lookaheads[st]
- else:
- laheads = self.grammar.Follow[p.name]
- for a in laheads:
- actlist.append((a,p,"reduce using rule %d (%s)" % (p.number,p)))
- r = st_action.get(a,None)
- if r is not None:
- # Whoa. Have a shift/reduce or reduce/reduce conflict
- if r > 0:
- # Need to decide on shift or reduce here
- # By default we favor shifting. Need to add
- # some precedence rules here.
- sprec,slevel = Productions[st_actionp[a].number].prec
- rprec,rlevel = Precedence.get(a,('right',0))
- if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
- # We really need to reduce here.
- st_action[a] = -p.number
- st_actionp[a] = p
- if not slevel and not rlevel:
- log.info(" ! shift/reduce conflict for %s resolved as reduce",a)
- self.sr_conflicts.append((st,a,'reduce'))
- Productions[p.number].reduced += 1
- elif (slevel == rlevel) and (rprec == 'nonassoc'):
- st_action[a] = None
- else:
- # Hmmm. Guess we'll keep the shift
- if not rlevel:
- log.info(" ! shift/reduce conflict for %s resolved as shift",a)
- self.sr_conflicts.append((st,a,'shift'))
- elif r < 0:
- # Reduce/reduce conflict. In this case, we favor the rule
- # that was defined first in the grammar file
- oldp = Productions[-r]
- pp = Productions[p.number]
- if oldp.line > pp.line:
- st_action[a] = -p.number
- st_actionp[a] = p
- chosenp,rejectp = pp,oldp
- Productions[p.number].reduced += 1
- Productions[oldp.number].reduced -= 1
- else:
- chosenp,rejectp = oldp,pp
- self.rr_conflicts.append((st,chosenp,rejectp))
- log.info(" ! reduce/reduce conflict for %s resolved using rule %d (%s)", a,st_actionp[a].number, st_actionp[a])
- else:
- raise LALRError("Unknown conflict in state %d" % st)
- else:
- st_action[a] = -p.number
- st_actionp[a] = p
- Productions[p.number].reduced += 1
- else:
- i = p.lr_index
- a = p.prod[i+1] # Get symbol right after the "."
- if a in self.grammar.Terminals:
- g = self.lr0_goto(I,a)
- j = self.lr0_cidhash.get(id(g),-1)
- if j >= 0:
- # We are in a shift state
- actlist.append((a,p,"shift and go to state %d" % j))
- r = st_action.get(a,None)
- if r is not None:
-                            # Whoa. Have a shift/reduce or shift/shift conflict
- if r > 0:
- if r != j:
- raise LALRError("Shift/shift conflict in state %d" % st)
- elif r < 0:
- # Do a precedence check.
- # - if precedence of reduce rule is higher, we reduce.
- # - if precedence of reduce is same and left assoc, we reduce.
- # - otherwise we shift
- rprec,rlevel = Productions[st_actionp[a].number].prec
- sprec,slevel = Precedence.get(a,('right',0))
- if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
- # We decide to shift here... highest precedence to shift
- Productions[st_actionp[a].number].reduced -= 1
- st_action[a] = j
- st_actionp[a] = p
- if not rlevel:
- log.info(" ! shift/reduce conflict for %s resolved as shift",a)
- self.sr_conflicts.append((st,a,'shift'))
- elif (slevel == rlevel) and (rprec == 'nonassoc'):
- st_action[a] = None
- else:
- # Hmmm. Guess we'll keep the reduce
- if not slevel and not rlevel:
- log.info(" ! shift/reduce conflict for %s resolved as reduce",a)
- self.sr_conflicts.append((st,a,'reduce'))
-
- else:
- raise LALRError("Unknown conflict in state %d" % st)
- else:
- st_action[a] = j
- st_actionp[a] = p
-
- # Print the actions associated with each terminal
- _actprint = { }
- for a,p,m in actlist:
- if a in st_action:
- if p is st_actionp[a]:
- log.info(" %-15s %s",a,m)
- _actprint[(a,m)] = 1
- log.info("")
- # Print the actions that were not used. (debugging)
- not_used = 0
- for a,p,m in actlist:
- if a in st_action:
- if p is not st_actionp[a]:
- if not (a,m) in _actprint:
- log.debug(" ! %-15s [ %s ]",a,m)
- not_used = 1
- _actprint[(a,m)] = 1
- if not_used:
- log.debug("")
-
- # Construct the goto table for this state
-
- nkeys = { }
- for ii in I:
- for s in ii.usyms:
- if s in self.grammar.Nonterminals:
- nkeys[s] = None
- for n in nkeys:
- g = self.lr0_goto(I,n)
- j = self.lr0_cidhash.get(id(g),-1)
- if j >= 0:
- st_goto[n] = j
- log.info(" %-30s shift and go to state %d",n,j)
-
- action[st] = st_action
- actionp[st] = st_actionp
- goto[st] = st_goto
- st += 1
-
-
- # -----------------------------------------------------------------------------
-    # write_table()
- #
- # This function writes the LR parsing tables to a file
- # -----------------------------------------------------------------------------
-
- def write_table(self,modulename,outputdir='',signature=""):
- basemodulename = modulename.split(".")[-1]
- filename = os.path.join(outputdir,basemodulename) + ".py"
- try:
- f = open(filename,"w")
-
- f.write("""
-# %s
-# This file is automatically generated. Do not edit.
-_tabversion = %r
-
-_lr_method = %r
-
-_lr_signature = %r
- """ % (filename, __tabversion__, self.lr_method, signature))
-
- # Change smaller to 0 to go back to original tables
- smaller = 1
-
- # Factor out names to try and make smaller
- if smaller:
- items = { }
-
- for s,nd in self.lr_action.items():
- for name,v in nd.items():
- i = items.get(name)
- if not i:
- i = ([],[])
- items[name] = i
- i[0].append(s)
- i[1].append(v)
-
- f.write("\n_lr_action_items = {")
- for k,v in items.items():
- f.write("%r:([" % k)
- for i in v[0]:
- f.write("%r," % i)
- f.write("],[")
- for i in v[1]:
- f.write("%r," % i)
-
- f.write("]),")
- f.write("}\n")
-
- f.write("""
-_lr_action = { }
-for _k, _v in _lr_action_items.items():
- for _x,_y in zip(_v[0],_v[1]):
- if not _x in _lr_action: _lr_action[_x] = { }
- _lr_action[_x][_k] = _y
-del _lr_action_items
-""")
-
- else:
- f.write("\n_lr_action = { ");
- for k,v in self.lr_action.items():
- f.write("(%r,%r):%r," % (k[0],k[1],v))
- f.write("}\n");
-
- if smaller:
- # Factor out names to try and make smaller
- items = { }
-
- for s,nd in self.lr_goto.items():
- for name,v in nd.items():
- i = items.get(name)
- if not i:
- i = ([],[])
- items[name] = i
- i[0].append(s)
- i[1].append(v)
-
- f.write("\n_lr_goto_items = {")
- for k,v in items.items():
- f.write("%r:([" % k)
- for i in v[0]:
- f.write("%r," % i)
- f.write("],[")
- for i in v[1]:
- f.write("%r," % i)
-
- f.write("]),")
- f.write("}\n")
-
- f.write("""
-_lr_goto = { }
-for _k, _v in _lr_goto_items.items():
- for _x,_y in zip(_v[0],_v[1]):
- if not _x in _lr_goto: _lr_goto[_x] = { }
- _lr_goto[_x][_k] = _y
-del _lr_goto_items
-""")
- else:
- f.write("\n_lr_goto = { ");
- for k,v in self.lr_goto.items():
- f.write("(%r,%r):%r," % (k[0],k[1],v))
- f.write("}\n");
-
- # Write production table
- f.write("_lr_productions = [\n")
- for p in self.lr_productions:
- if p.func:
- f.write(" (%r,%r,%d,%r,%r,%d),\n" % (p.str,p.name, p.len, p.func,p.file,p.line))
- else:
- f.write(" (%r,%r,%d,None,None,None),\n" % (str(p),p.name, p.len))
- f.write("]\n")
- f.close()
-
- except IOError:
- e = sys.exc_info()[1]
- sys.stderr.write("Unable to create '%s'\n" % filename)
- sys.stderr.write(str(e)+"\n")
- return
-
-
- # -----------------------------------------------------------------------------
- # pickle_table()
- #
- # This function pickles the LR parsing tables to a supplied file object
- # -----------------------------------------------------------------------------
-
- def pickle_table(self,filename,signature=""):
- try:
- import cPickle as pickle
- except ImportError:
- import pickle
- outf = open(filename,"wb")
- pickle.dump(__tabversion__,outf,pickle_protocol)
- pickle.dump(self.lr_method,outf,pickle_protocol)
- pickle.dump(signature,outf,pickle_protocol)
- pickle.dump(self.lr_action,outf,pickle_protocol)
- pickle.dump(self.lr_goto,outf,pickle_protocol)
-
- outp = []
- for p in self.lr_productions:
- if p.func:
- outp.append((p.str,p.name, p.len, p.func,p.file,p.line))
- else:
- outp.append((str(p),p.name,p.len,None,None,None))
- pickle.dump(outp,outf,pickle_protocol)
- outf.close()
-
-# -----------------------------------------------------------------------------
-# === INTROSPECTION ===
-#
-# The following functions and classes are used to implement the PLY
-# introspection features followed by the yacc() function itself.
-# -----------------------------------------------------------------------------
-
-# -----------------------------------------------------------------------------
-# get_caller_module_dict()
-#
-# This function returns a dictionary containing all of the symbols defined within
-# a caller further down the call stack. This is used to get the environment
-# associated with the yacc() call if none was provided.
-# -----------------------------------------------------------------------------
-
-def get_caller_module_dict(levels):
- try:
- raise RuntimeError
- except RuntimeError:
- e,b,t = sys.exc_info()
- f = t.tb_frame
- while levels > 0:
- f = f.f_back
- levels -= 1
- ldict = f.f_globals.copy()
- if f.f_globals != f.f_locals:
- ldict.update(f.f_locals)
-
- return ldict
-
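-# For example (illustrative): get_caller_module_dict(2) walks two frames up
-# from this function, so when yacc() calls it, the returned dictionary holds
-# the symbols of yacc()'s caller.
-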
-# -----------------------------------------------------------------------------
-# parse_grammar()
-#
-# This takes a raw grammar rule string and parses it into production data
-# -----------------------------------------------------------------------------
-def parse_grammar(doc,file,line):
- grammar = []
- # Split the doc string into lines
- pstrings = doc.splitlines()
- lastp = None
- dline = line
- for ps in pstrings:
- dline += 1
- p = ps.split()
- if not p: continue
- try:
- if p[0] == '|':
- # This is a continuation of a previous rule
- if not lastp:
- raise SyntaxError("%s:%d: Misplaced '|'" % (file,dline))
- prodname = lastp
- syms = p[1:]
- else:
- prodname = p[0]
- lastp = prodname
- syms = p[2:]
- assign = p[1]
- if assign != ':' and assign != '::=':
- raise SyntaxError("%s:%d: Syntax error. Expected ':'" % (file,dline))
-
- grammar.append((file,dline,prodname,syms))
- except SyntaxError:
- raise
- except Exception:
- raise SyntaxError("%s:%d: Syntax error in rule '%s'" % (file,dline,ps.strip()))
-
- return grammar
-
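-# For example (an illustrative sketch; the file name is made up), a rule
-# docstring such as
-#
-#     expression : expression PLUS term
-#                | term
-#
-# passed as parse_grammar(doc, 'calc.py', 10) yields
-#
-#     ('calc.py', 11, 'expression', ['expression', 'PLUS', 'term'])
-#     ('calc.py', 12, 'expression', ['term'])
-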
-# -----------------------------------------------------------------------------
-# ParserReflect()
-#
-# This class represents information extracted for building a parser including
-# start symbol, error function, tokens, precedence list, action functions,
-# etc.
-# -----------------------------------------------------------------------------
-class ParserReflect(object):
- def __init__(self,pdict,log=None):
- self.pdict = pdict
- self.start = None
- self.error_func = None
- self.tokens = None
- self.files = {}
- self.grammar = []
- self.error = 0
-
- if log is None:
- self.log = PlyLogger(sys.stderr)
- else:
- self.log = log
-
- # Get all of the basic information
- def get_all(self):
- self.get_start()
- self.get_error_func()
- self.get_tokens()
- self.get_precedence()
- self.get_pfunctions()
-
- # Validate all of the information
- def validate_all(self):
- self.validate_start()
- self.validate_error_func()
- self.validate_tokens()
- self.validate_precedence()
- self.validate_pfunctions()
- self.validate_files()
- return self.error
-
- # Compute a signature over the grammar
- def signature(self):
- try:
- from hashlib import md5
- except ImportError:
- from md5 import md5
- try:
- sig = md5()
- if self.start:
- sig.update(self.start.encode('latin-1'))
- if self.prec:
- sig.update("".join(["".join(p) for p in self.prec]).encode('latin-1'))
- if self.tokens:
- sig.update(" ".join(self.tokens).encode('latin-1'))
- for f in self.pfuncs:
- if f[3]:
- sig.update(f[3].encode('latin-1'))
- except (TypeError,ValueError):
- pass
- return sig.digest()
-
- # -----------------------------------------------------------------------------
-    # validate_files()
- #
- # This method checks to see if there are duplicated p_rulename() functions
- # in the parser module file. Without this function, it is really easy for
- # users to make mistakes by cutting and pasting code fragments (and it's a real
- # bugger to try and figure out why the resulting parser doesn't work). Therefore,
- # we just do a little regular expression pattern matching of def statements
- # to try and detect duplicates.
- # -----------------------------------------------------------------------------
-
- def validate_files(self):
- # Match def p_funcname(
- fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
-
- for filename in self.files.keys():
- base,ext = os.path.splitext(filename)
-            if ext != '.py': continue # No idea. Assume it's okay.
-
- try:
- f = open(filename)
- lines = f.readlines()
- f.close()
- except IOError:
- continue
-
- counthash = { }
- for linen,l in enumerate(lines):
- linen += 1
- m = fre.match(l)
- if m:
- name = m.group(1)
- prev = counthash.get(name)
- if not prev:
- counthash[name] = linen
- else:
- self.log.warning("%s:%d: Function %s redefined. Previously defined on line %d", filename,linen,name,prev)
-
- # Get the start symbol
- def get_start(self):
- self.start = self.pdict.get('start')
-
- # Validate the start symbol
- def validate_start(self):
- if self.start is not None:
- if not isinstance(self.start,str):
- self.log.error("'start' must be a string")
-
- # Look for error handler
- def get_error_func(self):
- self.error_func = self.pdict.get('p_error')
-
- # Validate the error function
- def validate_error_func(self):
- if self.error_func:
- if isinstance(self.error_func,types.FunctionType):
- ismethod = 0
- elif isinstance(self.error_func, types.MethodType):
- ismethod = 1
- else:
- self.log.error("'p_error' defined, but is not a function or method")
- self.error = 1
- return
-
- eline = func_code(self.error_func).co_firstlineno
- efile = func_code(self.error_func).co_filename
- self.files[efile] = 1
-
- if (func_code(self.error_func).co_argcount != 1+ismethod):
- self.log.error("%s:%d: p_error() requires 1 argument",efile,eline)
- self.error = 1
-
- # Get the tokens map
- def get_tokens(self):
- tokens = self.pdict.get("tokens",None)
- if not tokens:
- self.log.error("No token list is defined")
- self.error = 1
- return
-
- if not isinstance(tokens,(list, tuple)):
- self.log.error("tokens must be a list or tuple")
- self.error = 1
- return
-
- if not tokens:
- self.log.error("tokens is empty")
- self.error = 1
- return
-
- self.tokens = tokens
-
- # Validate the tokens
- def validate_tokens(self):
- # Validate the tokens.
- if 'error' in self.tokens:
- self.log.error("Illegal token name 'error'. Is a reserved word")
- self.error = 1
- return
-
- terminals = {}
- for n in self.tokens:
- if n in terminals:
- self.log.warning("Token '%s' multiply defined", n)
- terminals[n] = 1
-
- # Get the precedence map (if any)
- def get_precedence(self):
- self.prec = self.pdict.get("precedence",None)
-
- # Validate and parse the precedence map
- def validate_precedence(self):
- preclist = []
- if self.prec:
- if not isinstance(self.prec,(list,tuple)):
- self.log.error("precedence must be a list or tuple")
- self.error = 1
- return
- for level,p in enumerate(self.prec):
- if not isinstance(p,(list,tuple)):
- self.log.error("Bad precedence table")
- self.error = 1
- return
-
- if len(p) < 2:
- self.log.error("Malformed precedence entry %s. Must be (assoc, term, ..., term)",p)
- self.error = 1
- return
- assoc = p[0]
- if not isinstance(assoc,str):
- self.log.error("precedence associativity must be a string")
- self.error = 1
- return
- for term in p[1:]:
- if not isinstance(term,str):
- self.log.error("precedence items must be strings")
- self.error = 1
- return
- preclist.append((term,assoc,level+1))
- self.preclist = preclist
-
- # Get all p_functions from the grammar
- def get_pfunctions(self):
- p_functions = []
- for name, item in self.pdict.items():
- if name[:2] != 'p_': continue
- if name == 'p_error': continue
- if isinstance(item,(types.FunctionType,types.MethodType)):
- line = func_code(item).co_firstlineno
- file = func_code(item).co_filename
- p_functions.append((line,file,name,item.__doc__))
-
- # Sort all of the actions by line number
- p_functions.sort()
- self.pfuncs = p_functions
-
-
- # Validate all of the p_functions
- def validate_pfunctions(self):
- grammar = []
- # Check for non-empty symbols
- if len(self.pfuncs) == 0:
- self.log.error("no rules of the form p_rulename are defined")
- self.error = 1
- return
-
- for line, file, name, doc in self.pfuncs:
- func = self.pdict[name]
- if isinstance(func, types.MethodType):
- reqargs = 2
- else:
- reqargs = 1
- if func_code(func).co_argcount > reqargs:
- self.log.error("%s:%d: Rule '%s' has too many arguments",file,line,func.__name__)
- self.error = 1
- elif func_code(func).co_argcount < reqargs:
- self.log.error("%s:%d: Rule '%s' requires an argument",file,line,func.__name__)
- self.error = 1
- elif not func.__doc__:
- self.log.warning("%s:%d: No documentation string specified in function '%s' (ignored)",file,line,func.__name__)
- else:
- try:
- parsed_g = parse_grammar(doc,file,line)
- for g in parsed_g:
- grammar.append((name, g))
- except SyntaxError:
- e = sys.exc_info()[1]
- self.log.error(str(e))
- self.error = 1
-
- # Looks like a valid grammar rule
-                # Mark the file in which it is defined.
- self.files[file] = 1
-
- # Secondary validation step that looks for p_ definitions that are not functions
- # or functions that look like they might be grammar rules.
-
- for n,v in self.pdict.items():
- if n[0:2] == 'p_' and isinstance(v, (types.FunctionType, types.MethodType)): continue
- if n[0:2] == 't_': continue
- if n[0:2] == 'p_' and n != 'p_error':
- self.log.warning("'%s' not defined as a function", n)
- if ((isinstance(v,types.FunctionType) and func_code(v).co_argcount == 1) or
- (isinstance(v,types.MethodType) and func_code(v).co_argcount == 2)):
- try:
- doc = v.__doc__.split(" ")
- if doc[1] == ':':
- self.log.warning("%s:%d: Possible grammar rule '%s' defined without p_ prefix",
- func_code(v).co_filename, func_code(v).co_firstlineno,n)
- except Exception:
- pass
-
- self.grammar = grammar
-
-# -----------------------------------------------------------------------------
-# yacc(module)
-#
-# Build a parser
-# -----------------------------------------------------------------------------
-
-def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, start=None,
- check_recursion=1, optimize=0, write_tables=1, debugfile=debug_file,outputdir='',
- debuglog=None, errorlog = None, picklefile=None):
-
- global parse # Reference to the parsing method of the last built parser
-
- # If pickling is enabled, table files are not created
-
- if picklefile:
- write_tables = 0
-
- if errorlog is None:
- errorlog = PlyLogger(sys.stderr)
-
- # Get the module dictionary used for the parser
- if module:
- _items = [(k,getattr(module,k)) for k in dir(module)]
- pdict = dict(_items)
- else:
- pdict = get_caller_module_dict(2)
-
- # Collect parser information from the dictionary
- pinfo = ParserReflect(pdict,log=errorlog)
- pinfo.get_all()
-
- if pinfo.error:
- raise YaccError("Unable to build parser")
-
- # Check signature against table files (if any)
- signature = pinfo.signature()
-
- # Read the tables
- try:
- lr = LRTable()
- if picklefile:
- read_signature = lr.read_pickle(picklefile)
- else:
- read_signature = lr.read_table(tabmodule)
- if optimize or (read_signature == signature):
- try:
- lr.bind_callables(pinfo.pdict)
- parser = LRParser(lr,pinfo.error_func)
- parse = parser.parse
- return parser
- except Exception:
- e = sys.exc_info()[1]
- errorlog.warning("There was a problem loading the table file: %s", repr(e))
- except VersionError:
-        e = sys.exc_info()[1]
-        errorlog.warning(str(e))
- except Exception:
- pass
-
- if debuglog is None:
- if debug:
- debuglog = PlyLogger(open(debugfile,"w"))
- else:
- debuglog = NullLogger()
-
- debuglog.info("Created by PLY version %s (http://www.dabeaz.com/ply)", __version__)
-
-
- errors = 0
-
- # Validate the parser information
- if pinfo.validate_all():
- raise YaccError("Unable to build parser")
-
- if not pinfo.error_func:
- errorlog.warning("no p_error() function is defined")
-
- # Create a grammar object
- grammar = Grammar(pinfo.tokens)
-
- # Set precedence level for terminals
- for term, assoc, level in pinfo.preclist:
- try:
- grammar.set_precedence(term,assoc,level)
- except GrammarError:
- e = sys.exc_info()[1]
- errorlog.warning("%s",str(e))
-
- # Add productions to the grammar
- for funcname, gram in pinfo.grammar:
- file, line, prodname, syms = gram
- try:
- grammar.add_production(prodname,syms,funcname,file,line)
- except GrammarError:
- e = sys.exc_info()[1]
- errorlog.error("%s",str(e))
- errors = 1
-
- # Set the grammar start symbols
- try:
- if start is None:
- grammar.set_start(pinfo.start)
- else:
- grammar.set_start(start)
- except GrammarError:
- e = sys.exc_info()[1]
- errorlog.error(str(e))
- errors = 1
-
- if errors:
- raise YaccError("Unable to build parser")
-
- # Verify the grammar structure
- undefined_symbols = grammar.undefined_symbols()
- for sym, prod in undefined_symbols:
- errorlog.error("%s:%d: Symbol '%s' used, but not defined as a token or a rule",prod.file,prod.line,sym)
- errors = 1
-
- unused_terminals = grammar.unused_terminals()
- if unused_terminals:
- debuglog.info("")
- debuglog.info("Unused terminals:")
- debuglog.info("")
- for term in unused_terminals:
- errorlog.warning("Token '%s' defined, but not used", term)
- debuglog.info(" %s", term)
-
- # Print out all productions to the debug log
- if debug:
- debuglog.info("")
- debuglog.info("Grammar")
- debuglog.info("")
- for n,p in enumerate(grammar.Productions):
- debuglog.info("Rule %-5d %s", n, p)
-
- # Find unused non-terminals
- unused_rules = grammar.unused_rules()
- for prod in unused_rules:
- errorlog.warning("%s:%d: Rule '%s' defined, but not used", prod.file, prod.line, prod.name)
-
- if len(unused_terminals) == 1:
- errorlog.warning("There is 1 unused token")
- if len(unused_terminals) > 1:
- errorlog.warning("There are %d unused tokens", len(unused_terminals))
-
- if len(unused_rules) == 1:
- errorlog.warning("There is 1 unused rule")
- if len(unused_rules) > 1:
- errorlog.warning("There are %d unused rules", len(unused_rules))
-
- if debug:
- debuglog.info("")
- debuglog.info("Terminals, with rules where they appear")
- debuglog.info("")
- terms = list(grammar.Terminals)
- terms.sort()
- for term in terms:
- debuglog.info("%-20s : %s", term, " ".join([str(s) for s in grammar.Terminals[term]]))
-
- debuglog.info("")
- debuglog.info("Nonterminals, with rules where they appear")
- debuglog.info("")
- nonterms = list(grammar.Nonterminals)
- nonterms.sort()
- for nonterm in nonterms:
- debuglog.info("%-20s : %s", nonterm, " ".join([str(s) for s in grammar.Nonterminals[nonterm]]))
- debuglog.info("")
-
- if check_recursion:
- unreachable = grammar.find_unreachable()
- for u in unreachable:
- errorlog.warning("Symbol '%s' is unreachable",u)
-
- infinite = grammar.infinite_cycles()
- for inf in infinite:
- errorlog.error("Infinite recursion detected for symbol '%s'", inf)
- errors = 1
-
- unused_prec = grammar.unused_precedence()
- for term, assoc in unused_prec:
- errorlog.error("Precedence rule '%s' defined for unknown symbol '%s'", assoc, term)
- errors = 1
-
- if errors:
- raise YaccError("Unable to build parser")
-
- # Run the LRGeneratedTable on the grammar
- if debug:
- errorlog.debug("Generating %s tables", method)
-
- lr = LRGeneratedTable(grammar,method,debuglog)
-
- if debug:
- num_sr = len(lr.sr_conflicts)
-
- # Report shift/reduce and reduce/reduce conflicts
- if num_sr == 1:
- errorlog.warning("1 shift/reduce conflict")
- elif num_sr > 1:
- errorlog.warning("%d shift/reduce conflicts", num_sr)
-
- num_rr = len(lr.rr_conflicts)
- if num_rr == 1:
- errorlog.warning("1 reduce/reduce conflict")
- elif num_rr > 1:
- errorlog.warning("%d reduce/reduce conflicts", num_rr)
-
- # Write out conflicts to the output file
- if debug and (lr.sr_conflicts or lr.rr_conflicts):
- debuglog.warning("")
- debuglog.warning("Conflicts:")
- debuglog.warning("")
-
- for state, tok, resolution in lr.sr_conflicts:
- debuglog.warning("shift/reduce conflict for %s in state %d resolved as %s", tok, state, resolution)
-
- already_reported = {}
- for state, rule, rejected in lr.rr_conflicts:
- if (state,id(rule),id(rejected)) in already_reported:
- continue
- debuglog.warning("reduce/reduce conflict in state %d resolved using rule (%s)", state, rule)
- debuglog.warning("rejected rule (%s) in state %d", rejected,state)
- errorlog.warning("reduce/reduce conflict in state %d resolved using rule (%s)", state, rule)
- errorlog.warning("rejected rule (%s) in state %d", rejected, state)
- already_reported[state,id(rule),id(rejected)] = 1
-
- warned_never = []
- for state, rule, rejected in lr.rr_conflicts:
- if not rejected.reduced and (rejected not in warned_never):
- debuglog.warning("Rule (%s) is never reduced", rejected)
- errorlog.warning("Rule (%s) is never reduced", rejected)
- warned_never.append(rejected)
-
- # Write the table file if requested
- if write_tables:
- lr.write_table(tabmodule,outputdir,signature)
-
- # Write a pickled version of the tables
- if picklefile:
- lr.pickle_table(picklefile,signature)
-
- # Build the parser
- lr.bind_callables(pinfo.pdict)
- parser = LRParser(lr,pinfo.error_func)
-
- parse = parser.parse
- return parser
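-
-# A minimal usage sketch (illustrative; the grammar and 'mylexer' are made
-# up and not part of this module):
-#
-#     tokens = ('NUMBER', 'PLUS')
-#
-#     def p_expr_plus(p):
-#         'expr : expr PLUS NUMBER'
-#         p[0] = p[1] + p[3]
-#
-#     def p_expr_number(p):
-#         'expr : NUMBER'
-#         p[0] = p[1]
-#
-#     parser = yacc()   # builds tables from the calling module's p_ rules
-#     result = parser.parse('1+2', lexer=mylexer)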
diff --git a/bitbake/lib/progressbar.py b/bitbake/lib/progressbar.py
deleted file mode 100644
index b668647a36..0000000000
--- a/bitbake/lib/progressbar.py
+++ /dev/null
@@ -1,384 +0,0 @@
-#!/usr/bin/python
-# -*- coding: iso-8859-1 -*-
-#
-# progressbar - Text progressbar library for python.
-# Copyright (c) 2005 Nilton Volpato
-#
-# This library is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Lesser General Public
-# License as published by the Free Software Foundation; either
-# version 2.1 of the License, or (at your option) any later version.
-#
-# This library is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
-# Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-
-
-"""Text progressbar library for python.
-
-This library provides a text mode progressbar. This is typically used
-to display the progress of a long-running operation, providing a
-visual cue that processing is underway.
-
-The ProgressBar class manages the progress, and the format of the line
-is given by a number of widgets. A widget is an object that may
-display differently depending on the state of the progress. There are
-three types of widget:
-- a string, which always shows itself;
-- a ProgressBarWidget, which may return a different value every time
-its update method is called; and
-- a ProgressBarWidgetHFill, which is like ProgressBarWidget, except it
-expands to fill the remaining width of the line.
-
-The progressbar module is very easy to use, yet very powerful. It also
-automatically supports features like auto-resizing where available.
-"""
-
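-# A composition sketch (illustrative; the widget classes are defined below):
-#
-#     widgets = ['Progress: ', Percentage(), ' ', Bar(marker='='), ' ', ETA()]
-#     pbar = ProgressBar(widgets=widgets, maxval=100)
-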
-from __future__ import division
-
-__author__ = "Nilton Volpato"
-__author_email__ = "first-name dot last-name @ gmail.com"
-__date__ = "2006-05-07"
-__version__ = "2.3-dev"
-
-import sys, time, os
-from array import array
-try:
- from fcntl import ioctl
- import termios
-except ImportError:
- pass
-import signal
-try:
- basestring
-except NameError:
- basestring = (str,)
-
-class ProgressBarWidget(object):
- """This is an element of ProgressBar formatting.
-
-    The ProgressBar object will call its update method when an update
-    is needed. Its size may change between calls, but the results will
-    not be good if the size changes drastically and repeatedly.
- """
- def update(self, pbar):
- """Returns the string representing the widget.
-
- The parameter pbar is a reference to the calling ProgressBar,
-        through which one can access attributes of the class to know how
-        the update must be made.
-
-        At a minimum, this method must be overridden."""
- pass
-
-class ProgressBarWidgetHFill(object):
- """This is a variable width element of ProgressBar formatting.
-
-    The ProgressBar object will call its update method, informing it of
-    the width this object must be made. This is like TeX \\hfill: it will
- expand to fill the line. You can use more than one in the same
- line, and they will all have the same width, and together will
- fill the line.
- """
- def update(self, pbar, width):
- """Returns the string representing the widget.
-
- The parameter pbar is a reference to the calling ProgressBar,
-        through which one can access attributes of the class to know how
-        the update must be made. The parameter width is the total
-        horizontal width the widget must have.
-
-        At a minimum, this method must be overridden."""
- pass
-
-
-class ETA(ProgressBarWidget):
- "Widget for the Estimated Time of Arrival"
- def format_time(self, seconds):
- return time.strftime('%H:%M:%S', time.gmtime(seconds))
- def update(self, pbar):
- if pbar.currval == 0:
- return 'ETA: --:--:--'
- elif pbar.finished:
- return 'Time: %s' % self.format_time(pbar.seconds_elapsed)
- else:
- elapsed = pbar.seconds_elapsed
- eta = elapsed * pbar.maxval / pbar.currval - elapsed
- return 'ETA: %s' % self.format_time(eta)
-
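-# For example: 30 seconds elapsed at currval=25 of maxval=100 gives
-# eta = 30 * 100 / 25 - 30 = 90 seconds remaining.
-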
-class FileTransferSpeed(ProgressBarWidget):
- "Widget for showing the transfer speed (useful for file transfers)."
- def __init__(self, unit='B'):
- self.unit = unit
- self.fmt = '%6.2f %s'
- self.prefixes = ['', 'K', 'M', 'G', 'T', 'P']
- def update(self, pbar):
-        if pbar.seconds_elapsed < 2e-6: # effectively zero; avoid division by zero
- bps = 0.0
- else:
- bps = pbar.currval / pbar.seconds_elapsed
- spd = bps
- for u in self.prefixes:
- if spd < 1000:
- break
- spd /= 1000
- return self.fmt % (spd, u + self.unit + '/s')
-
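-# For example: at 2,500,000 bytes/second the value is scaled twice
-# (K, then M) and the widget renders '  2.50 MB/s'.
-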
-class RotatingMarker(ProgressBarWidget):
- "A rotating marker for filling the bar of progress."
- def __init__(self, markers='|/-\\'):
- self.markers = markers
- self.curmark = -1
- def update(self, pbar):
- if pbar.finished:
- return self.markers[0]
- self.curmark = (self.curmark + 1) % len(self.markers)
- return self.markers[self.curmark]
-
-class Percentage(ProgressBarWidget):
- "Just the percentage done."
- def update(self, pbar):
- return '%3d%%' % pbar.percentage()
-
-class SimpleProgress(ProgressBarWidget):
- "Returns what is already done and the total, e.g.: '5 of 47'"
- def __init__(self, sep=' of '):
- self.sep = sep
- def update(self, pbar):
- return '%d%s%d' % (pbar.currval, self.sep, pbar.maxval)
-
-class Bar(ProgressBarWidgetHFill):
- "The bar of progress. It will stretch to fill the line."
- def __init__(self, marker='#', left='|', right='|'):
- self.marker = marker
- self.left = left
- self.right = right
- def _format_marker(self, pbar):
- if isinstance(self.marker, basestring):
- return self.marker
- else:
- return self.marker.update(pbar)
- def update(self, pbar, width):
- percent = pbar.percentage()
- cwidth = width - len(self.left) - len(self.right)
- marked_width = int(percent * cwidth // 100)
- m = self._format_marker(pbar)
- bar = (self.left + (m * marked_width).ljust(cwidth) + self.right)
- return bar
-
-class ReverseBar(Bar):
- "The reverse bar of progress, or bar of regress. :)"
- def update(self, pbar, width):
- percent = pbar.percentage()
- cwidth = width - len(self.left) - len(self.right)
- marked_width = int(percent * cwidth // 100)
- m = self._format_marker(pbar)
- bar = (self.left + (m*marked_width).rjust(cwidth) + self.right)
- return bar
-
-default_widgets = [Percentage(), ' ', Bar()]
-class ProgressBar(object):
- """This is the ProgressBar class, it updates and prints the bar.
-
- A common way of using it is like:
- >>> pbar = ProgressBar().start()
- >>> for i in xrange(100):
- ... # do something
- ... pbar.update(i+1)
- ...
- >>> pbar.finish()
-
- You can also use a progressbar as an iterator:
- >>> progress = ProgressBar()
- >>> for i in progress(some_iterable):
- ... # do something
- ...
-
- But anything you want to do is possible (well, almost anything).
- You can supply different widgets of any type in any order. And you
- can even write your own widgets! There are many widgets already
- shipped and you should experiment with them.
-
- The term_width parameter must be an integer or None. In the latter case
-    it will try to guess it; if that fails, it will default to 80 columns.
-
- When implementing a widget update method you may access any
-    attribute or function of the ProgressBar object that calls the
- widget's update method. The most important attributes you would
- like to access are:
- - currval: current value of the progress, 0 <= currval <= maxval
- - maxval: maximum (and final) value of the progress
-    - finished: True if the bar has finished (reached 100%), False otherwise
- - start_time: the time when start() method of ProgressBar was called
- - seconds_elapsed: seconds elapsed since start_time
- - percentage(): percentage of the progress [0..100]. This is a method.
-
- The attributes above are unlikely to change between different versions,
- the other ones may change or cease to exist without notice, so try to rely
- only on the ones documented above if you are extending the progress bar.
- """
-
- __slots__ = ('currval', 'fd', 'finished', 'last_update_time', 'maxval',
- 'next_update', 'num_intervals', 'seconds_elapsed',
- 'signal_set', 'start_time', 'term_width', 'update_interval',
- 'widgets', '_iterable')
-
- _DEFAULT_MAXVAL = 100
-
- def __init__(self, maxval=None, widgets=default_widgets, term_width=None,
- fd=sys.stderr):
- self.maxval = maxval
- self.widgets = widgets
- self.fd = fd
- self.signal_set = False
- if term_width is not None:
- self.term_width = term_width
- else:
- try:
- self._handle_resize(None, None)
- signal.signal(signal.SIGWINCH, self._handle_resize)
- self.signal_set = True
- except (SystemExit, KeyboardInterrupt):
- raise
- except:
- self.term_width = int(os.environ.get('COLUMNS', 80)) - 1
-
- self.currval = 0
- self.finished = False
- self.start_time = None
- self.last_update_time = None
- self.seconds_elapsed = 0
- self._iterable = None
-
- def __call__(self, iterable):
- try:
- self.maxval = len(iterable)
- except TypeError:
- # If the iterable has no length, then rely on the value provided
- # by the user, otherwise fail.
- if not (isinstance(self.maxval, (int, long)) and self.maxval > 0):
- raise RuntimeError('Could not determine maxval from iterable. '
- 'You must explicitly provide a maxval.')
- self._iterable = iter(iterable)
- self.start()
- return self
-
- def __iter__(self):
- return self
-
- def next(self):
- try:
- next = self._iterable.next()
- self.update(self.currval + 1)
- return next
- except StopIteration:
- self.finish()
- raise
-
- def _handle_resize(self, signum, frame):
- h, w = array('h', ioctl(self.fd, termios.TIOCGWINSZ, '\0' * 8))[:2]
- self.term_width = w
-
- def percentage(self):
- "Returns the percentage of the progress."
- return self.currval * 100.0 / self.maxval
-
- def _format_widgets(self):
- r = []
- hfill_inds = []
- num_hfill = 0
- currwidth = 0
- for i, w in enumerate(self.widgets):
- if isinstance(w, ProgressBarWidgetHFill):
- r.append(w)
- hfill_inds.append(i)
- num_hfill += 1
- elif isinstance(w, basestring):
- r.append(w)
- currwidth += len(w)
- else:
- weval = w.update(self)
- currwidth += len(weval)
- r.append(weval)
- for iw in hfill_inds:
- widget_width = int((self.term_width - currwidth) // num_hfill)
- r[iw] = r[iw].update(self, widget_width)
- return r
-
- def _format_line(self):
- return ''.join(self._format_widgets()).ljust(self.term_width)
-
- def _next_update(self):
- return int((int(self.num_intervals *
- (self.currval / self.maxval)) + 1) *
- self.update_interval)
-
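-    # For example: with maxval=1000 and num_intervals=100 (so
-    # update_interval == 10), currval=250 gives (int(100 * 0.25) + 1) * 10,
-    # i.e. the next update is due at 260.
-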
- def _need_update(self):
- """Returns true when the progressbar should print an updated line.
-
- You can override this method if you want finer grained control over
- updates.
-
- The current implementation is optimized to be as fast as possible and
- as economical as possible in the number of updates. However, depending
- on your usage you may want to do more updates. For instance, if your
- progressbar stays in the same percentage for a long time, and you want
- to update other widgets, like ETA, then you could return True after
- some time has passed with no updates.
-
- Ideally you could call self._format_line() and see if it's different
- from the previous _format_line() call, but calling _format_line() takes
- around 20 times more time than calling this implementation of
- _need_update().
- """
- return self.currval >= self.next_update
-
- def update(self, value):
- "Updates the progress bar to a new value."
- assert 0 <= value <= self.maxval, '0 <= %d <= %d' % (value, self.maxval)
- self.currval = value
- if not self._need_update():
- return
- if self.start_time is None:
- raise RuntimeError('You must call start() before calling update()')
- now = time.time()
- self.seconds_elapsed = now - self.start_time
- self.next_update = self._next_update()
- self.fd.write(self._format_line() + '\r')
- self.last_update_time = now
-
- def start(self):
- """Starts measuring time, and prints the bar at 0%.
-
- It returns self so you can use it like this:
- >>> pbar = ProgressBar().start()
- >>> for i in xrange(100):
- ... # do something
- ... pbar.update(i+1)
- ...
- >>> pbar.finish()
- """
- if self.maxval is None:
- self.maxval = self._DEFAULT_MAXVAL
- assert self.maxval > 0
-
- self.num_intervals = max(100, self.term_width)
- self.update_interval = self.maxval / self.num_intervals
- self.next_update = 0
-
- self.start_time = self.last_update_time = time.time()
- self.update(0)
- return self
-
- def finish(self):
- """Used to tell the progress is finished."""
- self.finished = True
- self.update(self.maxval)
- self.fd.write('\n')
- if self.signal_set:
- signal.signal(signal.SIGWINCH, signal.SIG_DFL)