--- /dev/null
+[run]
+omit =
+ .tox/*
--- /dev/null
+.idea
+*.pyc
+.tox/
+.pytest-cache/
+.coverage-*
+htmlcov
--- /dev/null
+[MASTER]
+
+# Specify a configuration file.
+#rcfile=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=CVS
+
+# Pickle collected data for later comparisons.
+persistent=yes
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=
+
+# Use multiple processes to speed up Pylint.
+jobs=1
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension=no
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code
+extension-pkg-whitelist=
+
+# Allow optimization of some AST trees. This will activate a peephole AST
+# optimizer, which will apply various small optimizations. For instance, it can
+# be used to obtain the result of joining multiple strings with the addition
+# operator. Joining a lot of strings can lead to a maximum recursion error in
+# Pylint and this flag can prevent that. It has one side effect, the resulting
+# AST will be different than the one from reality.
+optimize-ast=no
+
+
+[MESSAGES CONTROL]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time. See also the "--disable" option for examples.
+#enable=
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once). You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use"--disable=all --enable=classes
+# --disable=W"
+disable=import-star-module-level,old-octal-literal,oct-method,print-statement,unpacking-in-except,parameter-unpacking,backtick,old-raise-syntax,old-ne-operator,long-suffix,dict-view-method,dict-iter-method,metaclass-assignment,next-method-called,raising-string,indexing-exception,raw_input-builtin,long-builtin,file-builtin,execfile-builtin,coerce-builtin,cmp-builtin,buffer-builtin,basestring-builtin,apply-builtin,filter-builtin-not-iterating,using-cmp-argument,useless-suppression,range-builtin-not-iterating,suppressed-message,no-absolute-import,old-division,cmp-method,reload-builtin,zip-builtin-not-iterating,intern-builtin,unichr-builtin,reduce-builtin,standarderror-builtin,unicode-builtin,xrange-builtin,coerce-method,delslice-method,getslice-method,setslice-method,input-builtin,round-builtin,hex-method,nonzero-method,map-builtin-not-iterating,max-line-length,missing-docstring,logging-too-many-args,import-error, line-too-long,no-name-in-module
+
+
+[REPORTS]
+
+# Set the output format. Available formats are text, parseable, colorized, msvs
+# (visual studio) and html. You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format=text
+
+# Put messages in a separate file for each module / package specified on the
+# command line instead of printing them on stdout. Reports (if any) will be
+# written in a file name "pylint_global.[txt|html]".
+files-output=no
+
+# Tells whether to display a full report or only the messages
+reports=yes
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+
+[LOGGING]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=logging
+
+
+[BASIC]
+
+# List of builtins function names that should not be used, separated by a comma
+bad-functions=map,filter,input
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=i,j,k,ex,Run,_
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=foo,bar,baz,toto,tutu,tata
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint=no
+
+# Regular expression matching correct function names
+function-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for function names
+function-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct variable names
+variable-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for variable names
+variable-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct constant names
+const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Naming hint for constant names
+const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
+
+# Regular expression matching correct attribute names
+attr-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for attribute names
+attr-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct argument names
+argument-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for argument names
+argument-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression matching correct class attribute names
+class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Naming hint for class attribute names
+class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
+
+# Regular expression matching correct inline iteration names
+inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
+
+# Naming hint for inline iteration names
+inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
+
+# Regular expression matching correct class names
+class-rgx=[A-Z_][a-zA-Z0-9]+$
+
+# Naming hint for class names
+class-name-hint=[A-Z_][a-zA-Z0-9]+$
+
+# Regular expression matching correct module names
+module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Naming hint for module names
+module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
+
+# Regular expression matching correct method names
+method-rgx=[a-z_][a-z0-9_]{2,30}$
+
+# Naming hint for method names
+method-name-hint=[a-z_][a-z0-9_]{2,30}$
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx=^_
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+
+[ELIF]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+
+[FORMAT]
+
+# Maximum number of characters on a single line.
+max-line-length=100
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines=^\s*(# )?<?https?://\S+>?$
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt=no
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=trailing-comma,dict-separator
+
+# Maximum number of lines in a module
+max-module-lines=1000
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=
+
+
+[SIMILARITIES]
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+# Ignore comments when computing similarities.
+ignore-comments=yes
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings=yes
+
+# Ignore imports when computing similarities.
+ignore-imports=no
+
+
+[VARIABLES]
+
+# Tells whether we should check for unused import in __init__ files.
+init-import=no
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx=_$|dummy
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=cb_,_cb
+
+
+[SPELLING]
+
+# Spelling dictionary name. Available dictionaries: none. To make it working
+# install python-enchant package.
+spelling-dict=
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words=no
+
+
+[MISCELLANEOUS]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=FIXME,XXX,TODO
+
+
+[TYPECHECK]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members=yes
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis. It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# List of classes names for which member attributes should not be checked
+# (useful for classes with attributes dynamically set). This supports can work
+# with qualified names.
+ignored-classes=
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=
+
+
+[IMPORTS]
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=regsub,TERMIOS,Bastion,rexec
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=
+
+
+[CLASSES]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods=__init__,__new__,setUp
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg=cls
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg=mcs
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected=_asdict,_fields,_replace,_source,_make
+
+
+[DESIGN]
+
+# Maximum number of arguments for function / method
+max-args=5
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+ignored-argument-names=_.*
+
+# Maximum number of locals for function / method body
+max-locals=15
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of branch for function / method body
+max-branches=12
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of boolean expressions in a if statement
+max-bool-expr=5
+
+
+[EXCEPTIONS]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions=Exception
--- /dev/null
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
--- /dev/null
+# RPMBuilder
+
+This tool allows you to build RPM files in mock environment.
+
+## What is Mock?
+
+Mock is a tool for building packages. It can build packages for different architectures and
+different Fedora or RHEL versions than the build host has. Mock creates chroots and builds packages
+in them. Its only task is to reliably populate a chroot and attempt to build a package in that
+chroot.
+
+Source: https://fedoraproject.org/wiki/Mock?rd=Subprojects/Mock
+
+
+## How does rpmbuilder work?
+
+The tool reads the user-provided configuration and creates checkouts of Mock settings and Git-hosted
+projects. Dependency mapping between projects is created from spec files found in every project.
+When a project is built, the spec file is patched so that the "Version:" field is set to the "git describe"
+value. The "Release:" field is incremented based on existing rpm files.
+
+The idea for this tool has been taken from the openSUSE Build Service (OBS). A separate tool was created since
+configuring OBS takes a lot of work and it does not have great support for password protected Git
+repositories.
+
+
+## Prerequisite
+
+### Installing mock and createrepo
+
+As a requirement for the tool to work, you need to install rpm building tool "mock" and repository
+tool "createrepo" to your host.
+
+In Redhat/Fedora:
+```
+$ yum install mock createrepo rpmdevtools
+```
+
+Ubuntu:
+```
+$ apt-get install mock createrepo
+```
+
+### Assign users to mock group
+
+Users of mock also need to belong to group "mock". This allows them to run mock which uses chroot.
+
+Create the mock group if it does not exist on your host
+```
+$ getent group mock || groupadd mock
+```
+
+Add yourself to mock group
+```
+$ usermod -a -G mock <username>
+```
+
+
+## Running script
+
+Create a workspace directory which builder can use to do checkouts and compilation. This directory
+should have sufficient space to store your checkouts and rpm files.
+
+Example:
+```
+$ mkdir /home/<username>/rpmworkspace
+```
+
+### Building project to a rpm file
+
+You can build local projects as rpm files. This is useful if you are developing a project and want
+to create rpm files without committing to the version control system.
+
+Example of building helloworld to rpm:
+```
+$ ./makebuild.py -w /home/<username>/rpmworkspace /home/<username>/helloworld
+```
+
+If you want to reconfigure Mock environment (e.g. extra Yum repositories) to be available during
+building, create a copy of default Mock configuration and provide it with -m option.
+
+Example:
+```
+$ cp defaults/epel-7-x86_64.cfg ~/mymock.cfg
+$ vim ~/mymock.cfg
+$ ./makebuild.py -w /home/<username>/rpmworkspace -m ~/mymock.cfg /home/<username>/helloworld
+```
+
+Note:
+ - RPM package version is created from "git describe". If git describe cannot be used, package
+ version is hard coded to a.b
+
+### Access built RPM packages
+
+If there are no errors during building, rpm files can be copied from build repository under your
+workspace directory.
+Example: /home/<username>/rpmworkspace/buildrepository/epel-7-x86_64/
+
+
+## RPM file name convention
+
+Rpm packages are named and versioned so that they can be traced back to the version control system.
+Rpmbuilder uses the command "git describe --dirty --tags" to produce an rpm package file name. If git
+is unable to describe the checkout, an alternative "git describe --dirty --all --always" command is used.
+
+**Example 1:** Clone with no tags
+File name mymodule-master.c110.gce32b26-1.el7.x86_64.rpm states that package mymodule has been made
+from master branch. Package was made from 110th commit and this commit has git hash ce32b26.
+
+**Example 2:** Clone with tag 1.0
+File name mymodule-1.0-1.el7.x86_64.rpm shows that mymodule package was made from tag 1.0.
+
+**Example 3:** Clone with two changes on top of tag 1.0 git clone
+File mymodule-1.0.c2.gad96bc2-1.el7.x86_64.rpm shows two changes have been made on top of 1.0 and
+also the identifying hash.
+
+**Example 4:** Clone from Example 3 and local changes
+File mymodule-master.dirty.c112.g1.0.2.g8193b3a-1.el7.x86_64.rpm shows that the clone directory
+contains local modifications which make it dirty.
+
+
+## More usage examples
+
+### Storing build products to remote server
+
+If projects are built in Jenkins, there is always danger that somebody might wipe the workspace. To
+protect against workspace wiping you can use stashmakebuild.py script. In your build configuration
+file define remote server and directory.
+
+When building starts script checks from your workspace that you have directories for projects and
+builder. If these are missing, your remote server/directory is used to pull previous build state.
+After each successful build your project configuration and rpm files are stored to remote server.
+
+With stashmakebuild.py you need to use two additional properties: --remotehost and --remotedir.
+
+### Create package with version taken from Git
+
+To read the package version from Git ("git describe"), set the package Version directive in the spec file as
+"%{_version}".
+
+### Build multiple Git projects with a build configuration file
+
+Create a configuration file which contains information about the projects and mock environment.
+Syntax for the configuration file can be copied from provided configuration-example.ini file.
+
+Example:
+```
+$ cp configuration-example.ini /home/<username>/buildconfig.ini
+$ vim /home/<username>/buildconfig.ini
+$ ./makebuild.py -w /home/<username>/rpmworkspace -b /home/<username>/buildconfig.ini
+```
+
+Note:
+ - Builder keeps track of the Git commit hash. Rpm building is done only if the project hash has changed.
+ - If commit hash has not changed since previous build, building is skipped.
+
+
+## Known issues
+
+1. If you are not using RedHat, CentOS or Fedora building hosts you might have problems running
+ mock. Build requirements that have been installed by rpmbuilder to mock chroot, might not be
+recognized by rpm tools during building. Because of this rpm building will fail complaining that
+necessary build requirements are missing.
+
+If your components do not have build requirements to each other, then there are no problems. This
+problem has been seen with Debian based distributions and it is a known bug:
+https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=794495
+
+Also spectool seems to be missing from Debian based packages. Without this tool rpmbuilder is
+crippled to work only with local source files. As long as you do not have any spec lines such as
+"SourceX http://example.com/package.tgz" you are safe.
+See the **Debian custom spectool installation** chapter for how to install spectool to your home directory.
+
+
+2. If you change the git repository url this will force rebuilding of this project even if the hash of
+   the code remains unchanged.
+
+### Debian custom spectool installation
+
+Clone spectool git somewhere in your home directory. For example to ~/gitrepos -directory:
+```
+cd gitrepos
+git clone https://pagure.io/spectool.git
+```
+Create a symbolic link to spectool where your search PATH can find it.
+```
+ln -s ~/gitrepos/spectool/spectool ~/bin/
+```
--- /dev/null
+## List all projects separately. Mandatory fields for every project
+## are type, url and ref. If your project contains more than one spec
+## file you need to define also which spec file should be used.
+
+## Example of rpm package/software called Project-name
+
+## Define project name. This is used to name git clones
+## and does not affect the rpm content
+#[helloworld]
+
+## Define git url. Make sure you have all read access to the repository.
+## You can use url formats which are compatible with normal git command.
+#url = git@gitlab:project/helloworld.git
+
+## Define type as project.
+#type = project
+
+## ref should be a git tag, branch or hash from where you would like to
+## get your source and spec files
+#ref = master
+
+## IF you have more than one spec file, specify which spec file should
+## be used. This is optional
+#spec = helloworld.spec
+
+## IF you want to enable/disable entry from building, define enabled
+## variable as 0/false/off or 1/true/on. This is optional
+#enabled = 1
+
+## Mock settings define the compilation environment. Easiest way to get a working mock settings
+## is to fork/branch the below repository if the default settings do not work for you.
+## Multiple roots can be separated with comma.
+
+[mock]
+url = git@gitlab:project/mocksettings.git
+ref = master
+roots = epel-7-x86_64
+
+## sshbackup defines a remote host where build is stored when done.
+## This backup also restores workspace if disk is cleaned
+[sshbackup]
+remotehost = localhost.example.com
+remotedir = /var/www/html/mybuild
--- /dev/null
# Root name to be used for chroot and caching, must differ between products
config_opts['root'] = 'epel-7-x86_64'

config_opts['target_arch'] = 'x86_64'
config_opts['legal_host_arches'] = ('x86_64',)
config_opts['dist'] = 'el7' # only useful for --resultdir variable subst
# Toolchain and helper packages installed into the chroot before any build
config_opts['chroot_setup_cmd'] = 'install createrepo yum-utils bison byacc cscope ctags cvs diffstat doxygen flex gcc gcc-c++ gcc-gfortran gettext git indent intltool libtool patch patchutils rcs redhat-rpm-config rpm-build subversion swig systemtap sudo'
# yum and ccache caching plugins are disabled for this root
config_opts['plugin_conf']['yum_cache_enable'] = False
config_opts['plugin_conf']['ccache_enable'] = False
# ccache options below take effect only if ccache_enable is set to True
config_opts['plugin_conf']['ccache_opts']['max_cache_size'] = '1G'
config_opts['plugin_conf']['ccache_opts']['dir'] = "/dev/shm/ccache.epel-7/"
# NOTE(review): networking is enabled presumably so sources can be fetched
# during the build - confirm before tightening
config_opts['rpmbuild_networking'] = True
# Successful chroots are cleaned up; failed ones are left for debugging
config_opts['cleanup_on_success'] = True
config_opts['cleanup_on_failure'] = False
config_opts['exclude_from_homedir_cleanup'] = ('build/SOURCES', '.bash_history', '.bashrc', 'build/RPMS', )

# Common RPM directive values
config_opts['macros']['%_platform_vendor']="My Product"
config_opts['macros']['%_platform_licence']="ASL 2.0"

# Product specific macros
config_opts['macros']['%_platform_root_path'] = "/opt/product"

# Compilation
# Uncomment to control parallel make jobs inside the chroot
#config_opts['macros']['%_smp_mflags'] = "-j6"
#config_opts['macros']['%_smp_ncpus_max'] = 0

# Yum configuration
# NOTE: the triple-quoted value below is written verbatim as yum.conf inside
# the chroot; the [sections] and settings in it belong to yum, not this file.
config_opts['yum.conf'] = """
[main]
cachedir=/var/cache/yum
keepcache=1
debuglevel=2
reposdir=/dev/null
logfile=/var/log/yum.log
retries=20
obsoletes=1
gpgcheck=0
assumeyes=1
syslog_ident=mock
syslog_device=

# RPM repositories for yum
[internal-epel-mirror]
name=internal-epel-mirror
baseurl=http://intranet.mycompany.org/mirrors/EPEL/7/$basearch/
enabled=0
metadata_expire=7d
gpgcheck=0

[local]
name=local repository
baseurl=file:///usr/localrepo/
enabled=1
gpgcheck=0
protect=1
priority=10
skip_if_unavailable=True

"""
--- /dev/null
+[formatters]
+keys: detailed,simple,unadorned,state
+
+[handlers]
+keys: simple_console,detailed_console,unadorned_console,simple_console_warnings_only
+
+[loggers]
+keys: root,build,state,mockbuild
+
+[formatter_state]
+format: %(asctime)s - %(message)s
+
+[formatter_unadorned]
+format: %(message)s
+
+[formatter_simple]
+format: %(levelname)s: %(message)s
+
+;useful for debugging:
+[formatter_detailed]
+format: %(levelname)s %(filename)s:%(lineno)d: %(message)s
+
+[handler_unadorned_console]
+class: StreamHandler
+args: []
+formatter: unadorned
+level: INFO
+
+[handler_simple_console]
+class: StreamHandler
+args: []
+formatter: simple
+level: INFO
+
+[handler_simple_console_warnings_only]
+class: StreamHandler
+args: []
+formatter: simple
+level: WARNING
+
+[handler_detailed_console]
+class: StreamHandler
+args: []
+formatter: detailed
+level: WARNING
+
; usually don't want to set a level for loggers
+; this way all handlers get all messages, and messages can be filtered
+; at the handler level
+;
+; all these loggers default to a console output handler
+;
+[logger_root]
+level: NOTSET
+handlers: simple_console
+
+; mockbuild logger normally has no output
+; catches stuff like mockbuild.trace_decorator and mockbuild.util
; don't normally want to propagate to root logger, either
+[logger_mockbuild]
+level: NOTSET
+handlers:
+qualname: mockbuild
+propagate: 1
+
+[logger_state]
+level: NOTSET
+; unadorned_console only outputs INFO or above
+handlers: unadorned_console
+qualname: mockbuild.Root.state
+propagate: 0
+
+[logger_build]
+level: NOTSET
+handlers: simple_console_warnings_only
+qualname: mockbuild.Root.build
+propagate: 0
+
+; the following is a list mock logger qualnames used within the code:
+;
+; qualname: mockbuild.util
+; qualname: mockbuild.uid
+; qualname: mockbuild.trace_decorator
+
--- /dev/null
+# mock defaults
+# vim:tw=0:ts=4:sw=4:et:
+#
+# This config file is for site-specific default values that apply across all
+# configurations. Options specified in this config file can be overridden in
+# the individual mock config files.
+#
+# The site-defaults.cfg delivered by default has NO options set. Only set
+# options here if you want to override the defaults.
+#
+# Entries in this file follow the same format as other mock config files.
+# config_opts['foo'] = bar
+
+#############################################################################
+#
+# Things that we recommend you set in site-defaults.cfg:
+#
+# config_opts['basedir'] = '/var/lib/mock/'
+# config_opts['cache_topdir'] = '/var/cache/mock'
+# Note: the path pointed to by basedir and cache_topdir must be owned
+# by group 'mock' and must have mode: g+rws
+# config_opts['rpmbuild_timeout'] = 0
+# config_opts['use_host_resolv'] = True
+
+# You can configure log format to pull from logging.ini formats of these names:
+# config_opts['build_log_fmt_name'] = "unadorned"
+# config_opts['root_log_fmt_name'] = "detailed"
+# config_opts['state_log_fmt_name'] = "state"
+#
+# mock will normally set up a minimal chroot /dev.
+# If you want to use a pre-configured /dev, disable this and use the bind-mount
+# plugin to mount your special /dev
+# config_opts['internal_dev_setup'] = True
+#
+# internal_setarch defaults to 'True' if the python 'ctypes' package is
+# available. It is in the python std lib on >= python 2.5. On older versions,
+# it is available as an addon. On systems w/o ctypes, it will default to 'False'
+# config_opts['internal_setarch'] = False
+#
+# the cleanup_on_* options allow you to automatically clean and remove the
+# mock build directory, but only take effect if --resultdir is used.
+# config_opts provides fine-grained control. cmdline only has big hammer
+#
+# config_opts['cleanup_on_success'] = 1
+# config_opts['cleanup_on_failure'] = 1
+
+# if you want mock to automatically run createrepo on the rpms in your
+# resultdir.
+# config_opts['createrepo_on_rpms'] = False
+# config_opts['createrepo_command'] = '/usr/bin/createrepo -d -q -x *.src.rpm'
+
+# if you want mock to backup the contents of a result dir before clean
+# config_opts['backup_on_clean'] = False
# config_opts['backup_base_dir'] = config_opts['basedir'] + "backup"
+
+
+#############################################################################
+#
+# plugin related. Below are the defaults. Change to suit your site
+# policy. site-defaults.cfg is a good place to do this.
+#
+# NOTE: Some of the caching options can theoretically affect build
# reproducibility. Change with care.
+#
+# config_opts['plugin_conf']['package_state_enable'] = True
+# config_opts['plugin_conf']['ccache_enable'] = True
+# config_opts['plugin_conf']['ccache_opts']['max_cache_size'] = '4G'
+# config_opts['plugin_conf']['ccache_opts']['compress'] = None
+# config_opts['plugin_conf']['ccache_opts']['dir'] = "%(cache_topdir)s/%(root)s/ccache/"
+# config_opts['plugin_conf']['yum_cache_enable'] = True
+# config_opts['plugin_conf']['yum_cache_opts']['max_age_days'] = 30
+# config_opts['plugin_conf']['yum_cache_opts']['dir'] = "%(cache_topdir)s/%(root)s/yum_cache/"
+# config_opts['plugin_conf']['root_cache_enable'] = True
+# config_opts['plugin_conf']['root_cache_opts']['max_age_days'] = 15
+# config_opts['plugin_conf']['root_cache_opts']['dir'] = "%(cache_topdir)s/%(root)s/root_cache/"
+# config_opts['plugin_conf']['root_cache_opts']['compress_program'] = "pigz"
+# config_opts['plugin_conf']['root_cache_opts']['extension'] = ".gz"
+# config_opts['plugin_conf']['root_cache_opts']['exclude_dirs'] = ["./proc", "./sys", "./dev",
+# "./tmp/ccache", "./var/cache/yum" ]
+#
+# bind mount plugin is enabled by default but has no configured directories to
+# mount
+# config_opts['plugin_conf']['bind_mount_enable'] = True
+# config_opts['plugin_conf']['bind_mount_opts']['dirs'].append(('/host/path', '/bind/mount/path/in/chroot/' ))
+#
+# config_opts['plugin_conf']['tmpfs_enable'] = False
+# config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 1024
+# config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = '512m'
+# config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
+# config_opts['plugin_conf']['chroot_scan_enable'] = False
+# config_opts['plugin_conf']['chroot_scan_opts'] = [ "core(\.\d+)?", "\.log$",]
+
+#############################################################################
+#
+# environment for chroot
+#
+# config_opts['environment']['TERM'] = 'vt100'
+# config_opts['environment']['SHELL'] = '/bin/bash'
+# config_opts['environment']['HOME'] = '/builddir'
+# config_opts['environment']['HOSTNAME'] = 'mock'
+# config_opts['environment']['PATH'] = '/usr/bin:/bin:/usr/sbin:/sbin'
+# config_opts['environment']['PROMPT_COMMAND'] = 'echo -n "<mock-chroot>"'
+# config_opts['environment']['LANG'] = os.environ.setdefault('LANG', 'en_US.UTF-8')
+# config_opts['environment']['TZ'] = os.environ.setdefault('TZ', 'EST5EDT')
+
+#############################################################################
+#
# Things that you can change, but we don't recommend it:
+# config_opts['chroothome'] = '/builddir'
+# config_opts['clean'] = True
+
+#############################################################################
+#
+# Things that must be adjusted if SCM integration is used:
+#
+# config_opts['scm'] = True
+# config_opts['scm_opts']['method'] = 'git'
+# config_opts['scm_opts']['cvs_get'] = 'cvs -d /srv/cvs co SCM_BRN SCM_PKG'
+# config_opts['scm_opts']['git_get'] = 'git clone SCM_BRN git://localhost/SCM_PKG.git SCM_PKG'
+# config_opts['scm_opts']['svn_get'] = 'svn co file:///srv/svn/SCM_PKG/SCM_BRN SCM_PKG'
+# config_opts['scm_opts']['spec'] = 'SCM_PKG.spec'
+# config_opts['scm_opts']['ext_src_dir'] = '/dev/null'
+# config_opts['scm_opts']['write_tar'] = True
+# config_opts['scm_opts']['git_timestamps'] = True
+
+# These options are also recognized but usually defined in cmd line
+# with --scm-option package=<pkg> --scm-option branch=<branch>
+# config_opts['scm_opts']['package'] = 'mypkg'
+# config_opts['scm_opts']['branch'] = 'master'
+
+#############################################################################
+#
+# Things that are best suited for individual chroot config files:
+#
+# MUST SET (in individual chroot cfg file):
+# config_opts['root'] = 'name-of-yum-build-dir'
+# config_opts['target_arch'] = 'i386'
+# config_opts['yum.conf'] = ''
+# config_opts['yum_common_opts'] = []
+#
+# CAN SET, defaults usually work ok:
+# config_opts['chroot_setup_cmd'] = 'install buildsys-build'
+# config_opts['log_config_file'] = 'logging.ini'
+# config_opts['more_buildreqs']['srpm_name-version-release'] = 'dependencies'
+# config_opts['macros']['%Add_your_macro_name_here'] = "add macro value here"
+# config_opts['files']['path/name/no/leading/slash'] = "put file contents here."
+# config_opts['chrootuid'] = os.getuid()
+
+# If you change chrootgid, you must also change "mock" to the correct group
+# name in this line of the mock PAM config:
+# auth sufficient pam_succeed_if.so user ingroup mock use_uid quiet
+# config_opts['chrootgid'] = grp.getgrnam("mock")[2]
+
+# config_opts['useradd'] = '/usr/sbin/useradd -m -u %(uid)s -g %(gid)s -d %(home)s -n %(user)s' # Fedora/RedHat
+#
+# Security related
+# config_opts['no_root_shells'] = False
+#
+# Proxy settings (https_proxy, ftp_proxy, and no_proxy can also be set)
+# config_opts['http_proxy'] = 'http://localhost:3128'
--- /dev/null
+#! /usr/bin/python -tt
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+This module loops through user given configuration and creates
+projects based on that information. Projects are then build
+"""
+import argparse
+import logging
+import os
+import platform
+import random
+import re
+import shutil
+import sys
+
+from rpmbuilder.baseerror import RpmbuilderError
+from rpmbuilder.buildhistory import Buildhistory
+from rpmbuilder.configfile import Configfilereader
+from rpmbuilder.log import configure_logging
+from rpmbuilder.mockbuilder import GitMockbuilder, LocalMockbuilder
+from rpmbuilder.packagebuilding import Packagebuilding
+from rpmbuilder.project import GitProject, LocalMountProject
+from rpmbuilder.prettyprinter import Prettyprint
+from rpmbuilder.rpmtools import Repotool
+from rpmbuilder.utils import find_files
+
+
class Build(object):

    """
    Build orchestrator. Creates the mock builder and project objects from
    user configuration, determines which projects need (re)building and
    runs the builds.
    """

    def __init__(self, args):
        """ args: parsed command line namespace (see ArgumentMakebuild) """
        self.logger = logging.getLogger(__name__)
        self.workspace = os.path.abspath(args.workspace)
        # self.configuration exists only when a build config file was given;
        # every later user probes it with hasattr() before touching it.
        if hasattr(args, 'buildconfig') and args.buildconfig:
            self.configuration = Configfilereader(os.path.abspath(args.buildconfig))
        self.builder = None
        self.projects = {}
        self.args = args
        self.packagebuilder = Packagebuilding(args)

    def update_building_blocks(self):
        """ Update version control system components and project configuration """
        # Mock builder selection priority: explicit -m option, builder from
        # the build configuration file, bundled default configuration.
        Prettyprint().print_heading("Initialize builders", 80)
        default_conf_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'defaults/lcc-epel-7-x86_64.cfg')
        if hasattr(self.args, 'mockconf') and self.args.mockconf:
            self.logger.debug("Loading Mock builder from local disk")
            self.builder = LocalMockbuilder(self.args.mockconf)
        elif hasattr(self, 'configuration') and self.configuration:
            self.logger.debug("Loading Mock builder from Git")
            self.builder = GitMockbuilder(self.workspace, self.configuration)
            # Changed mock settings invalidate all previously built results
            if self.builder.check_builder_changed():
                self.args.forcerebuild = True
        elif os.path.isfile(default_conf_file):
            self.logger.debug("Loading default Mock configuration from %s file", default_conf_file)
            self.builder = LocalMockbuilder(default_conf_file)
        else:
            self.logger.critical("No Mock builder configured. Define one in build config file or provide it with -m option.")
            raise BuildingError("No Mock builder configured.")

        # Projects outside of project configuration
        if hasattr(self.args, 'localproj') and self.args.localproj:
            self.update_local_mount_projects()

        # Projects from build configuration file
        if hasattr(self, 'configuration') and self.configuration:
            self.update_configini_projects()

        if not self.projects:
            raise BuildingError("No projects defined. Nothing to build.")

    def update_local_mount_projects(self):
        """ Create project objects and initialize project configuration.
        Project has been defined as argument """

        Prettyprint().print_heading("Initialize local projects", 80)
        for projectdir in self.args.localproj:
            if not os.path.isdir(projectdir):
                raise BuildingError("Given \"%s\" is not a directory" % projectdir)
            project_specs = list(find_files(os.path.abspath(projectdir), r'.*\.spec$'))
            for spec in project_specs:
                projectname = os.path.basename(projectdir.rstrip('/'))
                # When a directory holds several spec files, each spec file
                # becomes its own project named <directory>_<specname>.
                # (project_specs is already a list; the old len(list(...))
                # re-wrap was redundant)
                if len(project_specs) > 1:
                    projectname = projectname + '_' + os.path.splitext(os.path.basename(spec))[0]
                self.projects[projectname] = LocalMountProject(projectname,
                                                               os.path.abspath(projectdir),
                                                               self.workspace,
                                                               self.projects,
                                                               self.builder,
                                                               self.packagebuilder,
                                                               self.args,
                                                               spec_path=spec)

    def update_configini_projects(self):
        """ Create project objects and initialize project configuration.
        Project has been defined in configuration file """
        Prettyprint().print_heading("Initialize projects", 80)
        for section in self.configuration.get_sections():
            if self.configuration.get_string(section, "type") == "project" \
                    and self.configuration.get_bool(section, "enabled", defaultvalue=True):
                # Local command line projects take precedence over entries
                # with the same name in the build configuration.
                if section in self.projects:
                    self.logger.warning("Local %s project already configured. Skipping build config entry", section)
                else:
                    self.projects[section] = GitProject(section,
                                                        self.workspace,
                                                        self.configuration,
                                                        self.projects,
                                                        self.builder,
                                                        self.packagebuilder,
                                                        self.args)

    def start_building(self):
        """ Search for changes and start building.

        Returns True when all projects were built, None when there was
        nothing to do. Raises BuildingError on build failure. """
        Prettyprint().print_heading("Summary of changes", 80)
        projects_to_build = self.get_projects_to_build()
        self.logger.debug("Final list of projects to build: %s",
                          str(projects_to_build))

        Prettyprint().print_heading("Projects to build", 80)
        if projects_to_build:
            self.logger.info("%-30s %10s %10s", "Name", "Changed", "Rebuild")
            for project in projects_to_build:
                req_by = ""
                if self.projects[project].buildrequires_upstream:
                    req_by = "(build requires: {})".format(
                        ', '.join(self.projects[project].buildrequires_upstream))
                self.logger.info("%-30s %10s %10s %s",
                                 self.projects[project].name,
                                 self.projects[project].project_changed,
                                 self.projects[project].project_rebuild_needed,
                                 req_by)

            Prettyprint().print_heading("Building projects", 80)

            if self.mock_projects(projects_to_build):
                # (typo fix: message used to read "succesfully")
                self.logger.info("All built successfully..")
                Prettyprint().print_heading("Running final steps", 80)
                self.finalize(projects_to_build)

                # Clean mock chroots. The scrub flag cannot change inside
                # the loop, so it is checked once up front.
                if self.args.scrub:
                    for mockroot in self.builder.roots:
                        self.packagebuilder.scrub_mock_chroot(self.builder.get_configdir(),
                                                              mockroot)
                return True
            else:
                self.logger.critical("Problems while building")
                raise BuildingError("Error during rpm mock")
        else:
            self.logger.info("No projects to build.. no changes")
            return None

    def get_projects_to_build(self):
        """ Find which projects are not built yet """
        buildlist = []
        # Find projects that need to be built because of a change
        for project in self.projects:
            if self.projects[project].project_changed \
                    or self.projects[project].project_rebuild_needed:
                self.logger.info("Project \"%s\": Need to build", project)
                buildlist.append(project)
            else:
                self.logger.info("Project \"%s\": OK. Already built", project)

        # Find projects that have the changed projects in their buildrequires
        if buildlist:
            self.logger.debug("Projects %s need building.", str(buildlist))
            self.logger.debug("Looking for projects that need rebuild")
            projects_to_rebuild = []
            for project in buildlist:
                self.logger.debug("Project \"%s\" need building.", project)
                self.logger.debug("Checking if downstream requires rebuilding")
                need_rebuild = self.projects[project].mark_downstream_for_rebuild(set(buildlist))
                self.logger.debug("Rebuild needed for: %s", str(need_rebuild))
                projects_to_rebuild.extend(need_rebuild)
            buildlist.extend(projects_to_rebuild)
            # De-duplicate. NOTE(review): the shuffle randomizes build order,
            # presumably to surface hidden ordering assumptions - confirm.
            buildlist = list(set(buildlist))
            random.shuffle(buildlist)
        return buildlist

    def mock_projects(self, build_list):
        """ Loop through all mock chroots to build projects """
        for mockroot in self.builder.roots:
            Prettyprint().print_heading("Processing chroot " + mockroot, 70)
            if self.args.init:
                # Create mock chroot for project building
                self.packagebuilder.init_mock_chroot(os.path.join(self.workspace, "mocksettings", "logs"),
                                                     self.builder.get_configdir(),
                                                     mockroot)
            # Restore local yum repository to Mock environment
            hostyumrepository = os.path.join(self.workspace, "buildrepository", mockroot, "rpm")
            if os.path.isdir(os.path.join(hostyumrepository, "repodata")):
                logfile = os.path.join(self.workspace, 'restore-mock-env-yum-repository.log')
                self.packagebuilder.restore_local_repository(hostyumrepository,
                                                             "/usr/localrepo",
                                                             self.builder.get_configdir(),
                                                             mockroot,
                                                             logfile=logfile)

            # Mock projects
            if not self.build_projects(build_list, mockroot):
                return False
        return True

    def upstream_packages_in_buildlist(self, project, buildlist):
        """ Return True when any upstream build requirement of the project
        is still waiting in the build list """
        for proj in self.projects[project].buildrequires_upstream:
            if proj in buildlist:
                return True
        return False

    def build_projects(self, build_list, mockroot):
        """ Build listed projects in dependency order.

        Sweeps the list repeatedly, building every project whose upstream
        requirements are done; returns False when the remaining projects
        can never be satisfied. """
        self.logger.debug("%s: Projects to build=%s",
                          mockroot,
                          str(build_list))
        self.packagebuilder.update_local_repository(self.builder.get_configdir(), mockroot)
        something_was_built = True
        while something_was_built:
            something_was_built = False
            not_built = []
            for project in build_list:
                self.logger.debug("Trying to build: {}".format(project))
                self.logger.debug("Build list: {}".format(build_list))
                if not self.upstream_packages_in_buildlist(project, build_list):
                    self.projects[project].resolve_dependencies(mockroot)
                    self.logger.debug("OK to build {}".format(project))
                    self.projects[project].build_project(mockroot)
                    something_was_built = True
                    # Fresh packages immediately become available to the
                    # remaining builds through the local repository
                    self.packagebuilder.update_local_repository(self.builder.get_configdir(), mockroot)
                else:
                    self.logger.debug("Skipping {} because upstream is not built yet".format(project))
                    not_built.append(project)
            build_list = not_built

        if build_list:
            self.logger.warning("Requirements not available for \"%s\"",
                                ", ".join(build_list))
            return False
        return True

    def finalize(self, projectlist):
        """ Do final work such as create yum repositories """
        commonrepo = os.path.join(self.workspace, 'buildrepository')
        self.logger.info("Hard linking rpm packages to %s", commonrepo)
        for project in projectlist:
            self.projects[project].store_build_products(commonrepo)

        for mockroot in self.builder.roots:
            Repotool().createrepo(os.path.join(self.workspace,
                                               'buildrepository',
                                               mockroot,
                                               'rpm'))
            Repotool().createrepo(os.path.join(self.workspace,
                                               'buildrepository',
                                               mockroot,
                                               'srpm'))
        # Store information of used builder.
        # Next run then knows what was used in previous build.
        self.builder.store_builder_status()

        buildhistory = Buildhistory()
        historyfile = os.path.join(commonrepo, "buildhistory")
        buildhistory.update_history(historyfile,
                                    projectlist,
                                    self.projects)
        return True

    def rm_obsolete_projectdirs(self):
        """ Clean projects which are not listed in configuration """
        self.logger.debug("Cleaning unused project directories")
        projects_directory = os.path.join(self.workspace, 'projects')
        if not os.path.isdir(projects_directory):
            return True
        for subdir in os.listdir(projects_directory):
            fulldir = os.path.join(projects_directory, subdir)
            if subdir in self.projects:
                self.logger.debug("Project directory %s is active",
                                  fulldir)
            else:
                self.logger.debug("Removing directory %s. No match in projects",
                                  fulldir)
                shutil.rmtree(fulldir)
        return True
+
+
class BuildingError(RpmbuilderError):
    """ Exception type for failures originating from the build orchestration """
+
+
def warn_if_incompatible_distro():
    """ Warn when the host is not a Fedora/RedHat/CentOS based distribution.

    Cross-package BuildRequires handling is only expected to work on
    yum/mock based hosts; see README.md.
    """
    # platform.linux_distribution() was deprecated in Python 3.5 and removed
    # in Python 3.8. Fall back to an empty name instead of crashing there;
    # the empty name is not in the known-good list, so the warning still
    # gets shown.
    probe = getattr(platform, 'linux_distribution', None)
    distro_name = probe()[0].lower() if probe is not None else ''
    if distro_name not in ['fedora', 'redhat', 'rhel', 'centos']:
        logger = logging.getLogger()
        logger.warning("Distribution compatibility check failed.\n"
                       "If you use other than Fedora, RedHat or CentOS based Linux distribution, you might experience problems\n"
                       "in case there are BuildRequirements between your own packages. For more information, read README.md")
+
+
class ArgumentMakebuild(object):
    """ Builds the command line parser used by main().

    The configured parser is exposed as self.parser; option names,
    destinations and defaults form the stable interface. """

    def __init__(self):
        """ Create the parser and register every supported option """
        self.parser = argparse.ArgumentParser(description='''
        RPM building tool for continuous integration and development usage.
        ''')
        self.set_arguments(self.parser)

    def set_arguments(self, parser):
        """ Add relevant arguments """
        add_opt = parser.add_argument
        # Positional: zero or more project directories outside buildconfig
        add_opt("localproj",
                metavar="dir",
                help="Local project directory outside of buildconfig. This option can be used multiple times.",
                nargs="*")
        # Mandatory sandbox directory for clones and build results
        add_opt("-w",
                "--workspace",
                help="Sandbox directory for builder. Used to store repository clones and built rpm files. Required option.",
                required=True)
        # NOTE: the historical -b/--buildconfig option is currently disabled.
        add_opt("-m",
                "--mockconf",
                help="Local Mock configuration file. Overrides mock settings from build configuration.")
        add_opt("--mockarguments",
                help="Arguments to be passed to mock. Check possible arguments from mock man pages")
        add_opt("-v",
                "--verbose",
                help="Verbosed printing.",
                action="store_true")
        add_opt("-f",
                "--forcerebuild",
                help="Force rebuilding of all projects.",
                action="store_true")
        # --nowipe stores False into "scrub": failed chroot is kept on disk
        add_opt("--nowipe",
                help="Skip cleaning of Mock chroot if build fails. "
                     "Old chroot can be used for debugging but if you use this option, then you need to clean unused chroot manually.",
                action="store_false",
                dest="scrub")
        add_opt("--nosrpm",
                help="Skip source rpm creation.",
                action="store_true")
        # --noinit stores False into "init": chroot (re)creation is skipped
        add_opt("--noinit",
                help="Skip initialization (cleaning) of mock chroot.",
                default=True,
                action="store_false",
                dest="init")
        add_opt("--uniqueext",
                help="Unique extension used for cache.",
                default=str(os.getpid()),
                dest="uniqueext")
+
+
def main():
    """ Entry point: parse arguments, set up logging and run the build """
    arguments = ArgumentMakebuild().parser.parse_args()

    # Debug output is also collected into a file under the workspace
    configure_logging(arguments.verbose, os.path.join(arguments.workspace, 'debug.log'))

    warn_if_incompatible_distro()

    # Start the build system
    try:
        build_runner = Build(arguments)
        build_runner.update_building_blocks()
        build_runner.start_building()
    except RpmbuilderError as err:
        # Repeat the distro hint on failure: incompatibility is a likely cause
        logging.getLogger().error("Could not produce a build. %s", err)
        warn_if_incompatible_distro()
        raise
+
+if __name__ == "__main__":
+ try:
+ main()
+ except RpmbuilderError:
+ sys.exit(1)
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
class RpmbuilderError(Exception):
    """ Common base class for every error raised by the rpmbuilder package """
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" Writing of history for the build. History explain why different
+projects were built at some time. """
+
+import logging
+import datetime
+import json
+import os
+
class Buildhistory(object):

    """ Build history checks what has been built and creates a file using
    this information.

    Two files are maintained side by side:
      <outfile>.log   append-only human readable log
      <outfile>.json  cumulative machine readable history

    Example of the json content (previously a stray class-level string
    literal, which was a no-op statement rather than documentation):

    {
      "2018-10-11 08:39:16.918914": {
        "ansible-fm": {
          "rpmfiles": [
            "ansible-fm-c46.gde71b7e-1.el7.centos.noarch.rpm"
          ],
          "commit": "de71b7e7fc0410df3d74cf209f5216b24157988a",
          "srpmfiles": [
            "ansible-fm-c46.gde71b7e-1.el7.centos.src.rpm"
          ]
        }
      }
    }
    """

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def update_history(self, outfile, built_projects, projects):
        """ Request history and push it to be written into file """
        history = self.__gather_history(built_projects, projects)
        self.__write_history_txt(outfile + '.log', history)
        self.__write_history_json(outfile + '.json', history)

    def __write_history_txt(self, outfile, history):
        """ Append one text block per build date to the log file """
        self.logger.info("Writing build history to %s", outfile)
        with open(outfile, 'a') as fpoint:
            for change in history:
                fpoint.write(change + '\n')
                for project in history[change]:
                    fpoint.write(' ' + project)
                    # Commit hash is present only for changed git projects
                    if 'commit' in history[change][project]:
                        fpoint.write(' ' + history[change][project]['commit'] + '\n')
                    else:
                        fpoint.write('\n')
                    for rpmfile in history[change][project]['rpmfiles']:
                        fpoint.write(' ' + rpmfile + '\n')
                    for rpmfile in history[change][project]['srpmfiles']:
                        fpoint.write(' ' + rpmfile + '\n')

    def __write_history_json(self, outfile, history):
        """ Merge the new history into the existing json file and rewrite it """
        self.logger.info("Writing build history to %s", outfile)
        jsondata = {}
        if os.path.isfile(outfile):
            # File format is a single-element list wrapping the history dict
            with open(outfile, 'r') as fpoint:
                jsondata = json.load(fpoint)[0]
        jsondata.update(history)
        with open(outfile, 'w') as fpoint:
            fpoint.write(json.dumps([jsondata], indent=2, sort_keys=True) + '\n')

    @staticmethod
    def __gather_history(built_projects, projects):
        """ Loop projects and check what are the versions. This is then history """
        builddate = str(datetime.datetime.now())
        historydict = {builddate: {}}  # dict for all projects
        for project in built_projects:
            # Store commit hash version when the project tracks a vcs
            commitsha = None
            if projects[project].project_changed and hasattr(projects[project], 'vcs') and projects[project].vcs.commitsha:
                commitsha = projects[project].vcs.commitsha

            # List new rpm files from a project across all mock roots
            rpmfiles = []
            srpmfiles = []
            for buildroot in projects[project].builders.roots:
                (rpmlist, srpmlist) = projects[project].list_buildproducts_for_mockroot(buildroot)
                rpmfiles.extend(rpmlist)
                srpmfiles.extend(srpmlist)
            projectchange = {project: {'rpmfiles': rpmfiles, 'srpmfiles': srpmfiles}}
            if commitsha:
                projectchange[project].update({'commit': commitsha})
            historydict[builddate].update(projectchange)
        return historydict
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Read sections from a build configuration file and check that
+all required values have been given.
+"""
+import ConfigParser
+import logging
+import re
+
+from rpmbuilder.baseerror import RpmbuilderError
+
+
class Configfilereader(object):

    """ Reading and processing of user given configuration file """

    def __init__(self, configfile):
        self.logger = logging.getLogger(__name__)
        self.configfile = configfile
        self.configuration = self.readconfig(configfile)

    def readconfig(self, configfile):
        """ Read and parse the configuration file.

        Raises ConfigError when the file cannot be opened or a section
        name fails validation. """
        conf = ConfigParser.ConfigParser()
        try:
            with open(configfile) as filep:
                conf.readfp(filep)
        except IOError:
            raise ConfigError("Failed to open configuration file %s" % configfile)

        self.__validate_section_names(conf)
        return conf

    def get_bool(self, section, option, mandatory=False, defaultvalue=False):
        """ Get boolean values from configuration. In case of problems do raise
        or just return default value """
        try:
            return self.configuration.getboolean(section, option)
        except ConfigParser.NoSectionError:
            raise ConfigError("Could not find required [%s] section in configuration" % section)
        except ConfigParser.NoOptionError:
            if mandatory:
                # BUGFIX: the format arguments must be a tuple. The old
                # '... % option, section' fed only one value to two
                # placeholders and raised TypeError instead of ConfigError.
                raise ConfigError("Could not find option %s from [%s] section" % (option, section))
            return defaultvalue

    def get_string(self, section, option, mandatory=False, defaultvalue=None):
        """ Return the requested value from the given section. In case of problems
        do raise or just return default value """
        try:
            return self.configuration.get(section, option)
        except ConfigParser.NoSectionError:
            raise ConfigError("Could not find required [%s] section in configuration" % section)
        except ConfigParser.NoOptionError:
            if mandatory:
                # BUGFIX: same tuple fix as in get_bool above
                raise ConfigError("Could not find option %s from [%s] section" % (option, section))
            return defaultvalue

    def get_sections(self):
        """ List all sections from the configuration """
        # The previous try/except with a bare re-raise added nothing.
        return self.configuration.sections()

    def __validate_section_names(self, configuration):
        """ Loop through all section names and do validation """
        for section in configuration.sections():
            self.__validate_section_name(section)

    def __validate_section_name(self, name):
        """ Check that section contains characters that
        do not cause problems for directory names """
        if not re.match('^[A-Za-z0-9-]+$', name):
            # BUGFIX: the explanation strings were passed as extra logging
            # arguments with only one %s placeholder, which made the logging
            # module report a formatting error instead of the message.
            self.logger.critical("Configuration of [%s] has problems. "
                                 "Section name has illegal characters. "
                                 "Use only alphanumeric and dashes", name)
            raise ConfigError("Section %s name contains illegal characters" % name)
+
+
class ConfigError(RpmbuilderError):

    """ Exception for configuration file content problems.

    Raised by Configfilereader for unreadable files, missing mandatory
    sections/options and illegal section names.
    """
    pass
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import logging
+import subprocess
+
# Module-level logger shared by this module.
logger = logging.getLogger(__name__)


class Executor(object):
    """ Thin wrapper around subprocess for running external commands. """

    def run(self, cmd):
        """ Run cmd (an argument list) and return its captured stdout.

        Raises Exception when the command exits with a non-zero status;
        stderr output from a successful command is only logged at debug
        level.
        """
        logger.debug('Executing: {}'.format(cmd))
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate()
        rc = proc.returncode
        if rc != 0:
            raise Exception('Command {} returned non-zero exit status {}: '
                            'stdout="{}", stderr="{}"'.format(cmd, rc, out, err))
        if err:
            logger.debug(
                'Command {} exit status {} but stderr not empty: "{}"'.format(cmd, rc,
                                                                              err))
        return out
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import re
+import mock
+import pytest
+
+from rpmbuilder.executor import Executor
+
+
# NOTE(review): expected outputs are Python 2 str values; under Python 3
# Executor.run returns bytes — confirm the target interpreter.
@pytest.mark.parametrize('input_cmd, expected_output', [
    (['true'], ''),
    (['echo', 'foo'], 'foo\n'),
])
def test_run_cmd(input_cmd, expected_output):
    """ Executor.run returns the command's stdout on success. """
    assert Executor().run(input_cmd) == expected_output
+
+
# Decorators apply bottom-up: mock_popen patches subprocess.Popen,
# mock_log patches Logger.debug.
@mock.patch('logging.Logger.debug')
@mock.patch('subprocess.Popen')
def test_stderr_is_logged(mock_popen, mock_log):
    """ A zero-exit command with stderr output produces a debug log entry. """
    process_mock = mock.Mock()
    process_mock.configure_mock(**{
        'communicate.return_value': ('some ouput', 'some errput'),
        'returncode': 0,
    })
    mock_popen.return_value = process_mock
    Executor().run(['ls'])
    # The most recent debug call should be the "stderr not empty" message.
    assert re.match('.*exit status 0 but stderr not empty.*', mock_log.call_args[0][0])
+
+
def test_run_cmd_fail():
    """ A non-zero exit raises with stdout/stderr details in the message. """
    expected = ('Command .* returned non-zero exit status 2: stdout="", '
                'stderr="ls: .* No such file or directory')
    with pytest.raises(Exception, match=expected):
        Executor().run(['ls', 'bar'])
--- /dev/null
+#!/usr/bin/env python
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import re
+import shutil
+import hashlib
+import lxml.html
+import urllib3
+
+
+HTTP = urllib3.PoolManager()
+
def debug(log):
    """ Print a debug message; rebound to logger.debug by get_sources(). """
    print(log)
+
def verbose(log):
    """ Print an info message; rebound to logger.info by get_sources(). """
    print(log)
+
def filter_dot(lst):
    """ Filter out hidden entries (names beginning with a dot).

    Uses startswith so empty strings pass through instead of raising
    IndexError as the original path[0] check did.
    """
    return filter(lambda path: not path.startswith('.'), lst)
+
def get_url(url, file_hash):
    """ Crawl a directory-index style HTTP listing two levels deep and
    return the full URL of the entry matching file_hash, or None when
    no listing contains it.

    NOTE(review): the result is built as path + file_hash, which assumes
    each link in the top-level listing ends with '/' — confirm against
    the repository layout being crawled.
    """
    debug("http get {}".format(url))
    request = HTTP.request('GET', url)
    dom = lxml.html.fromstring(request.data)
    for link in filter_dot(dom.xpath('//a/@href')):
        path = '{}/{}'.format(url, link)
        debug("http get {}".format(path))
        request = HTTP.request('GET', path)
        dom = lxml.html.fromstring(request.data)
        if file_hash in dom.xpath('//a/@href'):
            return '{}{}'.format(path, file_hash)
+
def get_repo_name(path):
    """ Derive the repository name from the single ".<name>.metadata"
    file in path; return None when no metadata file exists.

    Raises Exception when several metadata files are found.
    """
    # Bug fix: the dot before "metadata" must be escaped, otherwise the
    # pattern also matched names such as ".fooXmetadata".
    regex = re.compile(r'^\.([^.]*)\.metadata$')
    meta = list(filter(regex.match, os.listdir(path)))
    if len(meta) == 0:
        return None
    if len(meta) != 1:
        raise Exception('Multiple metadata files: {}'.format(", ".join(meta)))
    repo_name = regex.search(meta[0]).group(1)
    debug("repo name is {}".format(repo_name))
    return repo_name
+
def parse_metadatafile(path, repo_name):
    """ Parse ".<repo_name>.metadata" in path into a dict mapping
    file name -> checksum.

    Each metadata line is expected to hold "<checksum> <filename>".
    """
    result = {}
    filename = "{}/.{}.metadata".format(path, repo_name)
    debug("metadata file: {}".format(filename))
    with open(filename) as metadata:
        for line in metadata:
            items = line.split()
            if len(items) < 2:
                # Robustness fix: a blank trailing line or stray token
                # used to crash with IndexError; skip such lines.
                continue
            result[items[1]] = items[0]
            debug('found {}: {}'.format(items[1], items[0]))
    return result
+
def get_hash(filename, hashfunc):
    """ Feed the file's contents through hashfunc in 128 KiB chunks and
    return the hexadecimal digest string. """
    chunk_size = 128 * 1024
    with open(filename, 'rb', buffering=0) as contents:
        while True:
            chunk = contents.read(chunk_size)
            if not chunk:
                break
            hashfunc.update(chunk)
    digest = hashfunc.hexdigest()
    debug("digest is {}".format(digest))
    return digest
+
def check_file(filename, checksum):
    """ Verify the file against checksum; the hash algorithm is picked
    by checksum length (md5/sha1/sha256/sha512).

    Raises Exception for an unsupported checksum length or a mismatch.
    """
    debug("checking {} {}".format(filename, checksum))
    # Map digest length to constructor so only the needed hash object is
    # created (the original dict instantiated all four up front).
    hashmap = {
        32: hashlib.md5,
        40: hashlib.sha1,
        64: hashlib.sha256,
        128: hashlib.sha512,
    }
    if len(checksum) not in hashmap:
        # Typo fix in the error message: "lenght" -> "length".
        raise Exception('Checksum length unsupported: {}'.format(checksum))
    if get_hash(filename, hashmap[len(checksum)]()) != checksum:
        raise Exception("Checksum doesn't match: {} {}".format(filename, checksum))
    debug("checksum ok")
+
def download(url, destination, checksum):
    """ Download url to destination atomically.

    Data is first streamed to "<destination>.tmp", verified against
    checksum, then renamed into place.  The finally-block removes any
    leftover tmp file; after a successful rename the remove fails with
    OSError, which is ignored on purpose.
    """
    tmpfile = "{}.tmp".format(destination)
    try:
        debug("downloading {} to {}".format(url, tmpfile))
        with HTTP.request('GET', url, preload_content=False) as resp, open(tmpfile, 'wb') as out_file:
            shutil.copyfileobj(resp, out_file)
        check_file(tmpfile, checksum)
        debug("renaming {} to {}".format(tmpfile, destination))
        os.rename(tmpfile, destination)
    finally:
        try:
            os.remove(tmpfile)
            debug("removed {}".format(tmpfile))
        except OSError:
            pass
+
def get_sources(path, sources_list, logger):
    """ Ensure every file listed in path's metadata file is present with
    the right checksum, downloading missing or corrupt files from the
    mirror URLs in sources_list.

    Raises Exception when a file cannot be found on any mirror.
    """
    if logger:
        # Rebind the module-level print helpers to the caller's logger.
        global debug
        global verbose
        debug = logger.debug
        verbose = logger.info

    repo = get_repo_name(path)
    if not repo:
        verbose('no metadata file in "{}".'.format(path))
        return

    for k, v in parse_metadatafile(path, repo).items():
        filename = os.path.join(path, k)
        try:
            check_file(filename, v)
        except Exception:
            # Bug fix: the original bare "except:" also swallowed
            # SystemExit/KeyboardInterrupt; only real errors should
            # trigger a re-download.
            found = False
            for sources in sources_list:
                repo_root = "{}/{}".format(sources, repo)
                url = get_url(repo_root, v)
                if url:
                    debug("retrieving {} to {}".format(url, filename))
                    download(url, filename, v)
                    verbose('retrieved "{}"'.format(k))
                    found = True
                    break
            if not found:
                raise Exception('File "{}" not found'.format(v))
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Logging configuration for rpm builder
+"""
+import logging
+import os
+import sys
+
+def configure_logging(verbose=False, debugfile="debug.log"):
+ """ Logging to screen(console) and file """
+
+ debugfile_dirname = os.path.dirname(debugfile)
+ if not os.path.isdir(debugfile_dirname):
+ os.mkdir(debugfile_dirname)
+
+ logging.basicConfig(level=logging.DEBUG,
+ format='%(asctime)s %(levelname)s: %(message)s',
+ filename=debugfile,
+ filemode='w')
+ # define a Handler which writes INFO messages or higher to the sys.stderr
+ console = logging.StreamHandler(stream=sys.stdout)
+ if verbose:
+ console.setLevel(logging.DEBUG)
+ else:
+ console.setLevel(logging.INFO)
+ # set a format which is simpler for console use
+ formatter = logging.Formatter('%(levelname)s: %(message)s')
+ # tell the handler to use this format
+ console.setFormatter(formatter)
+ # add the handler to the root log
+ logging.getLogger('').addHandler(console)
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" Handling of mock building environment """
+import json
+import logging
+import os
+
+from rpmbuilder.baseerror import RpmbuilderError
+from rpmbuilder.version_control import VersionControlSystem
+
+
class Mockbuilder(object):

    """ Mockbuilder handled mock building configuration """

    def __init__(self):
        # roots holds the mock chroot configuration (root) names that
        # subclasses populate from a local file or from git.
        self.logger = logging.getLogger(__name__)
        self.roots = []
+
+
class LocalMockbuilder(Mockbuilder):

    """ Mock configuration contains information of chroot used for building.
    Configuration is taken from local file"""

    def __init__(self, configfile):
        super(LocalMockbuilder, self).__init__()

        # Bug fix: rstrip('.cfg') strips any trailing run of the
        # characters '.', 'c', 'f', 'g', mangling names such as
        # "myconfig.cfg" -> "myconfi".  Strip the extension explicitly.
        config_name = os.path.basename(configfile.rstrip('/'))
        if config_name.endswith('.cfg'):
            config_name = config_name[:-len('.cfg')]
        self.roots.append(config_name)
        self.configdir = os.path.dirname(os.path.abspath(configfile))

    def get_configdir(self):
        """ Return the directory holding the mock configuration file. """
        return self.configdir

    def store_builder_status(self):
        """ Local configuration has no VCS state to record. """
        pass
+
+
class GitMockbuilder(Mockbuilder):

    """ Mock configuration contains information of chroot used for building.
    Configuration is taken from git"""

    def __init__(self, workspace, conf):
        super(GitMockbuilder, self).__init__()

        self.mock_settings_dir = os.path.join(workspace, "mocksettings")
        self.mock_settings_checkout_dir = os.path.join(self.mock_settings_dir,
                                                       "checkout")

        # Section of the build configuration that describes the mock setup.
        confsection = "mock"
        self.roots = self.__list_from_csv(conf.get_string(confsection,
                                                          "roots",
                                                          mandatory=True))

        self.vcs = VersionControlSystem(self.mock_settings_checkout_dir)

        try:
            self.vcs.update_git_project(conf.get_string(confsection,
                                                        "url",
                                                        mandatory=True),
                                        conf.get_string(confsection,
                                                        "ref",
                                                        mandatory=True))
        except:
            self.logger.critical("Problems updating git clone")
            raise

    def get_configdir(self):
        """ Return the mock configuration directory inside the checkout. """
        return os.path.join(self.mock_settings_checkout_dir, 'etc', 'mock')

    def store_builder_status(self):
        """ Save information of the builder checkout. This way we can
        check if mock configuration has changed and all projects can be
        rebuild """
        statusfile = os.path.join(self.mock_settings_dir, 'status.txt')
        self.logger.debug("Updating %s", statusfile)
        projectstatus = {"sha": self.vcs.commitsha}
        try:
            with open(statusfile, 'w') as outfile:
                json.dump(projectstatus, outfile)
        except:
            self.logger.error("Could not create a status file")
            raise

    def check_builder_changed(self):
        """
        Check if there has been changes in the project
        if project has not been compiled -> return = True
        if project has GIT/VCS changes -> return = True
        if project has not changed -> return = False
        """
        statusfile = os.path.join(self.mock_settings_dir, 'status.txt')

        if os.path.isfile(statusfile):
            with open(statusfile, 'r') as filep:
                previousprojectstatus = json.load(filep)
            # Compare the recorded commit against the current checkout.
            if previousprojectstatus['sha'] != self.vcs.commitsha:
                self.logger.debug("Mock configuration has changed")
                return True
            self.logger.debug("Mock configuration has NO changes")
            return False
        # No status file means the configuration has never been built.
        return True

    @staticmethod
    def __list_from_csv(csv):
        """ Split a comma separated value string into a list of stripped,
        de-duplicated entries. For example "foo, bar" -> ["foo", "bar"].

        Bug fix: the original passed the split through set() before
        stripping, which made the resulting order nondeterministic.
        First-occurrence order is now preserved.
        """
        outlist = []
        seen = set()
        for entry in csv.split(','):
            stripped = entry.strip()
            if stripped not in seen:
                seen.add(stripped)
                outlist.append(stripped)
        return outlist
+
+
class MockbuilderError(RpmbuilderError):

    """ Exceptions originating from Builder and main level """
    pass
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" Module in charge of building a project """
+import glob
+import logging
+import os
+import pwd
+import shutil
+import subprocess
+from distutils.spawn import find_executable
+
+import datetime
+from rpmbuilder.baseerror import RpmbuilderError
+from rpmbuilder.prettyprinter import Prettyprint
+
# Availability flags for parallel compressors; detected once in
# Packagebuilding.__init__ via find_executable and consulted when
# choosing the compression command in create_source_archive.
PIGZ_INSTALLED = False
PBZIP2_INSTALLED = False
PXZ_INSTALLED = False
+
+class Packagebuilding(object):
+
+ """ Object for building rpm files with mock """
+
    def __init__(self, masterargs):
        # Chroothousekeeping cleans chroot in case of mock errors. This should
        # keep /var/lib/mock from growing too much
        self.masterargs = masterargs
        self.logger = logging.getLogger(__name__)
        self.__check_tool_availability()
        self.chroot_installed_rpms = []

        # Detect optional parallel compressors once and record the result
        # in the module-level flags used by create_source_archive.
        if find_executable("pigz"):
            global PIGZ_INSTALLED
            PIGZ_INSTALLED = True
            self.logger.debug("pigz is available")
        if find_executable("pbzip2"):
            global PBZIP2_INSTALLED
            PBZIP2_INSTALLED = True
            self.logger.debug("pbzip2 is available")
        if find_executable("pxz"):
            global PXZ_INSTALLED
            PXZ_INSTALLED = True
            self.logger.debug("pxz is available")
+
    @staticmethod
    def __check_tool_availability():
        """ Verify that user belongs to mock group for things to work """
        # os.system returns non-zero when grep finds no "(mock)" group in
        # the `id` output for the current user.
        username = pwd.getpwuid(os.getuid())[0]
        cmd = "id " + username + "| grep \\(mock\\) > /dev/null"
        if os.system(cmd) != 0:
            raise PackagebuildingError("Mock tool requires user to "
                                       "belong to group called mock")
        return True
+
+ def patch_specfile(self, origspecfile, outputdir, newversion, newrelease):
+ """ Spec file is patched with version information from git describe """
+ Prettyprint().print_heading("Patch spec", 50)
+ self.logger.info("Patching new spec from %s", origspecfile)
+ self.logger.debug(" - Version: %s", newversion)
+ self.logger.debug(" - Release: %s", newrelease)
+
+ specfilebasename = os.path.basename(origspecfile)
+ patchedspecfile = os.path.join(outputdir, specfilebasename)
+ self.logger.debug("Writing new spec file to %s", patchedspecfile)
+
+ with open(origspecfile, 'r') as filepin:
+ filepin_lines = filepin.readlines()
+
+ with open(patchedspecfile, 'w') as filepout:
+ for line in filepin_lines:
+ linestripped = line.strip()
+ if not linestripped.startswith("#"):
+ # Check if version could be patched
+ if linestripped.lower().startswith("version:"):
+ filepout.write("Version: " + newversion + '\n')
+ elif linestripped.lower().startswith("release:"):
+ filepout.write("Release: " + newrelease + '\n')
+ else:
+ filepout.write(line)
+ return patchedspecfile
+
    def init_mock_chroot(self, resultdir, configdir, root):
        """
        Start a mock chroot where build requirements
        can be installed before building
        """
        Prettyprint().print_heading("Mock init in " + root, 50)

        self.clean_directory(resultdir)

        mock_arg_resultdir = "--resultdir=" + resultdir

        mocklogfile = resultdir + '/mock-init-' + root + '.log'

        # Scrub everything first so the chroot starts from a known state.
        arguments = [mock_arg_resultdir,
                     "--scrub=all"]
        self.run_mock_command(arguments, mocklogfile, configdir, root)

        #Allow the builder to run sudo without terminal and without password
        #This makes it possible to run disk image builder needed by ipa-builder
        # NOTE(review): this appends to /etc/sudoers inside the chroot via a
        # shell one-liner; the grep guard keeps the entry from duplicating.
        allow_sudo_str = "mockbuild ALL=(ALL) NOPASSWD: ALL"
        notty_str = "Defaults:mockbuild !requiretty"
        sudoers_file = "/etc/sudoers"
        command = "grep \'%s\' %s || echo -e \'%s\n%s\' >> %s" %(allow_sudo_str, sudoers_file, allow_sudo_str, notty_str, sudoers_file)
        arguments=["--chroot",
                   command ]
        self.run_mock_command(arguments, mocklogfile, configdir, root)

        return True
+
+ def restore_local_repository(self, localdir, destdir, configdir, root, logfile):
+ """
+ Mock copying local yum repository to mock environment so that it can
+ be used during building of other RPM packages.
+ """
+ Prettyprint().print_heading("Restoring local repository", 50)
+ arguments = ["--copyin",
+ localdir,
+ destdir]
+ self.run_mock_command(arguments, logfile, configdir, root)
+
+ def mock_source_rpm(self, hostsourcedir, specfile, resultdir, configdir, root):
+ """ Mock SRPM file which can be used to build rpm """
+ Prettyprint().print_heading("Mock source rpm in " + root, 50)
+ self.logger.info("Build from:")
+ self.logger.info(" - source directory %s", hostsourcedir)
+ self.logger.info(" - spec %s", specfile)
+
+ self.clean_directory(resultdir)
+
+ mock_arg_resultdir = "--resultdir=" + resultdir
+ mock_arg_spec = "--spec=" + specfile
+ mock_arg_sources = "--sources=" + hostsourcedir
+ arguments = [mock_arg_resultdir,
+ "--no-clean",
+ "--no-cleanup-after",
+ "--buildsrpm",
+ mock_arg_sources,
+ mock_arg_spec]
+
+ mocklogfile = resultdir + '/mock.log'
+ self.run_mock_command(arguments, mocklogfile, configdir, root)
+
+ # Find source rpm and return the path
+ globstring = resultdir + '/*.src.rpm'
+ globmatches = glob.glob(globstring)
+ assert len(globmatches) == 1, "Too many source rpm files"
+
+ return globmatches[0]
+
+ def mock_rpm(self, sourcerpm, resultdir, configdir, root):
+ """ Mock RPM binary file from SRPM """
+ Prettyprint().print_heading("Mock rpm in " + root, 50)
+ self.logger.info("Building from:")
+ self.logger.info(" - source rpm %s", sourcerpm)
+
+ self.clean_directory(resultdir)
+
+ mock_arg_resultdir = "--resultdir=" + resultdir
+ arguments = [mock_arg_resultdir,
+ "--no-clean",
+ "--no-cleanup-after",
+ "--rebuild",
+ sourcerpm]
+
+ mocklogfile = resultdir + '/mock.log'
+ self.run_mock_command(arguments, mocklogfile, configdir, root)
+
+ self.logger.debug("RPM files build to: %s", resultdir)
+ return True
+
    def mock_rpm_from_archive(self, source_tar_packages, resultdir, configdir, root):
        """ Mock rpm binary file straight from archive file """
        self.clean_directory(resultdir)

        # Copy source archive to chroot
        chroot_sourcedir = "/builddir/build/SOURCES/"
        self.copy_to_chroot(configdir, root, resultdir, source_tar_packages, chroot_sourcedir)

        # Create rpm from source archive
        sourcebasename = os.path.basename(source_tar_packages[0])
        chrootsourcefile = os.path.join(chroot_sourcedir, sourcebasename)

        Prettyprint().print_heading("Mock rpm in " + root, 50)
        self.logger.info("Building from:")
        self.logger.info(" - source archive %s", chrootsourcefile)

        mock_arg_resultdir = "--resultdir=" + resultdir
        rpmbuildcommand = "/usr/bin/rpmbuild --noclean -tb -v "
        # NOTE(review): chrootsourcefile is already absolute, so this join
        # returns chrootsourcefile unchanged — presumably intentional, but
        # confirm before simplifying.
        rpmbuildcommand += os.path.join(chroot_sourcedir, chrootsourcefile)
        arguments = [mock_arg_resultdir,
                     "--chroot",
                     rpmbuildcommand]
        mocklogfile = resultdir + '/mock-rpmbuild.log'
        self.run_mock_command(arguments, mocklogfile, configdir, root)
+
    def mock_rpm_from_filesystem(self, path, spec, resultdir, configdir, root, srpm_resultdir):
        """ Mock rpm binary file straight from archive file """
        self.clean_directory(resultdir)
        # Copy the spec and every file under SOURCES into the chroot's
        # build directories.
        chroot_sourcedir = "/builddir/build/"
        self.copy_to_chroot(configdir, root, resultdir, [os.path.join(path, 'SPECS', spec)], os.path.join(chroot_sourcedir, 'SPECS'))
        self.copy_to_chroot(configdir, root, resultdir, [os.path.join(path, 'SOURCES', f) for f in os.listdir(os.path.join(path, 'SOURCES'))], os.path.join(chroot_sourcedir, 'SOURCES'))

        Prettyprint().print_heading("Mock rpm in " + root, 50)
        mocklogfile = resultdir + '/mock-rpmbuild.log'
        mock_arg_resultdir = "--resultdir=" + resultdir
        # Files copied in keep the calling user's ownership; rpmbuild runs
        # as root inside the chroot, so fix ownership first.
        arguments = [mock_arg_resultdir,
                     "--chroot",
                     "chown -R root:root "+chroot_sourcedir]
        self.run_mock_command(arguments, mocklogfile, configdir, root)
        rpmbuildcommand = "/usr/bin/rpmbuild --noclean -ba -v "
        rpmbuildcommand += os.path.join(chroot_sourcedir, 'SPECS', spec)
        arguments = [mock_arg_resultdir,
                     "--chroot",
                     rpmbuildcommand]
        mocklogfile = resultdir + '/mock-rpmbuild.log'
        self.run_mock_command(arguments, mocklogfile, configdir, root)

        # Copy build results out of the chroot, then sort binary rpms and
        # source rpms into their respective result directories.
        arguments = ["--copyout",
                     "/builddir/build", resultdir+"/tmp/packages"]
        mocklogfile = resultdir + '/mock-copyout.log'
        self.run_mock_command(arguments, mocklogfile, configdir, root)

        for filename in glob.glob(resultdir+"/tmp/packages/RPMS/*"):
            shutil.move(filename, resultdir)

        for filename in glob.glob(resultdir+"/tmp/packages/SRPMS/*"):
            shutil.move(filename, srpm_resultdir)
+
    def mock_wipe_buildroot(self, resultdir, configdir, root):
        """ Wipe buildroot clean """
        Prettyprint().print_heading("Wiping buildroot", 50)
        # Preserve built rpms in /usr/localrepo before wiping the build
        # directories so later builds can consume them as a yum repo.
        arguments = ["--chroot",
                     "mkdir -pv /usr/localrepo && " \
                     "cp -v /builddir/build/RPMS/*.rpm /usr/localrepo/. ;" \
                     "rm -rf /builddir/build/{BUILD,RPMS,SOURCES,SPECS,SRPMS}/*"]
        mocklogfile = resultdir + '/mock-wipe-buildroot.log'
        self.run_mock_command(arguments, mocklogfile, configdir, root)
+
    def update_local_repository(self, configdir, root):
        """ Rebuild the in-chroot yum repository metadata so freshly
        built rpms become installable as build dependencies. """
        Prettyprint().print_heading("Update repository " + root, 50)

        arguments = ["--chroot",
                     "mkdir -pv /usr/localrepo && " \
                     "createrepo --update /usr/localrepo && yum clean expire-cache"]
        self.run_mock_command(arguments, configdir+"/log", configdir, root)
+
+ def copy_to_chroot(self, configdir, root, resultdir, source_files, destination):
+ # Copy source archive to chroot
+ Prettyprint().print_heading("Copy source archive to " + root, 50)
+ self.logger.info(" - Copy from %s", source_files)
+ self.logger.info(" - Copy to %s", destination)
+
+ mock_arg_resultdir = "--resultdir=" + resultdir
+ arguments = [mock_arg_resultdir,
+ "--copyin"]
+ arguments.extend(source_files)
+ arguments.append(destination)
+
+ mocklogfile = resultdir + '/mock-copyin.log'
+ self.run_mock_command(arguments, mocklogfile, configdir, root)
+
    def scrub_mock_chroot(self, configdir, root):
        """ Remove the mock chroot (killing orphan processes) to free
        disk space under /var/lib/mock. """
        time_start = datetime.datetime.now()
        Prettyprint().print_heading("Scrub mock chroot " + root, 50)
        mock_clean_command = ["/usr/bin/mock",
                              "--configdir=" + configdir,
                              "--root=" + root,
                              "--uniqueext=" + self.masterargs.uniqueext,
                              "--orphanskill",
                              "--scrub=chroot"]
        self.logger.info("Removing mock chroot.")
        self.logger.debug(" ".join(mock_clean_command))
        try:
            subprocess.check_call(mock_clean_command,
                                  shell=False,
                                  stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as err:
            raise PackagebuildingError("Mock chroot removal failed. Error code %s" % (err.returncode))
        time_delta = datetime.datetime.now() - time_start
        self.logger.debug('[mock-end] cmd="%s" took=%s (%s sec)', mock_clean_command, time_delta, time_delta.seconds)
+
+ def run_builddep(self, specfile, resultdir, configdir, root):
+ arguments = ["--copyin"]
+ arguments.append(specfile)
+ arguments.append("/builddir/"+os.path.basename(specfile))
+
+ mocklogfile = resultdir + '/mock-builddep.log'
+ self.run_mock_command(arguments, mocklogfile, configdir, root)
+
+ builddepcommand = "/usr/bin/yum-builddep -y "+"/builddir/"+os.path.basename(specfile)
+ arguments = ["--chroot",
+ builddepcommand]
+ mocklogfile = resultdir + '/mock-builddep.log'
+ return self.run_mock_command(arguments, mocklogfile, configdir, root, True) == 0
+
    def run_mock_command(self, arguments, outputfile, configdir, root, return_error=False):
        """ Mock binary rpm package """
        # Base mock invocation; caller-specific arguments are appended.
        mock_command = ["/usr/bin/mock",
                        "--configdir=" + configdir,
                        "--root=" + root,
                        "--uniqueext=" + self.masterargs.uniqueext,
                        "--verbose",
                        "--old-chroot",
                        "--enable-network"]
        mock_command.extend(arguments)
        if self.masterargs.mockarguments:
            mock_command.extend([self.masterargs.mockarguments])
        self.logger.info("Running mock. Log goes to %s", outputfile)
        self.logger.debug('[mock-start] cmd="%s"', mock_command)
        time_start = datetime.datetime.now()
        self.logger.debug(" ".join(mock_command))
        with open(outputfile, 'a') as filep:
            try:
                mockproc = subprocess.Popen(mock_command,
                                            shell=False,
                                            stdout=subprocess.PIPE,
                                            stderr=subprocess.STDOUT)
                # Stream mock output line by line into the log file (and to
                # the debug log when verbose).
                # NOTE(review): readline() yields bytes on Python 3, where
                # rstrip('\n') and filep.write(line) would fail — this code
                # presumably targets Python 2; confirm before porting.
                for line in iter(mockproc.stdout.readline, b''):
                    if self.masterargs.verbose:
                        self.logger.debug("mock-%s", line.rstrip('\n'))
                    filep.write(line)
                _, stderr = mockproc.communicate() # wait for the subprocess to exit
                if return_error:
                    # Caller wants the raw exit code instead of an exception.
                    return mockproc.returncode
                if mockproc.returncode != 0:
                    raise Mockcommanderror(returncode=mockproc.returncode)
            except Mockcommanderror as err:
                self.logger.error("There was a failure during mocking")
                if self.masterargs.scrub:
                    self.scrub_mock_chroot(configdir, root)
                    guidance_message = ""
                else:
                    # Chroot left in place: tell the user how to inspect it.
                    mock_shell_command = ["/usr/bin/mock",
                                          "--configdir=" + configdir,
                                          "--root=" + root,
                                          "--uniqueext=" + self.masterargs.uniqueext,
                                          "--shell"]
                    guidance_message = ". To open mock shell, run the following: " + " ".join(mock_shell_command)
                raise PackagebuildingError("Mock exited with value \"%s\". "
                                           "Log for debuging: %s %s" % (err.returncode, outputfile, guidance_message))
            except OSError:
                raise PackagebuildingError("Mock executable not found. "
                                           "Have you installed mock?")
            except:
                raise
        time_delta = datetime.datetime.now() - time_start
        self.logger.debug('[mock-end] cmd="%s" took=%s (%s sec)', mock_command, time_delta, time_delta.seconds)
+
+ def clean_directory(self, directory):
+ """ Make sure given directory exists and is clean """
+ if os.path.isdir(directory):
+ shutil.rmtree(directory)
+ os.makedirs(directory)
+
+ def tar_filter(self, tarinfo):
+ """ Filter git related and spec files away """
+ if tarinfo.name.endswith('.spec') or tarinfo.name.endswith('.git'):
+ self.logger.debug("Ignore %s", tarinfo.name)
+ return None
+ self.logger.debug("Archiving %s", tarinfo.name)
+ return tarinfo
+
+ def create_source_archive(self,
+ package_name,
+ sourcedir,
+ outputdir,
+ project_changed,
+ archive_file_extension):
+ """
+ Create tar file. Example helloworld-2.4.tar.gz
+ Tar file has naming <name>-<version>.tar.gz
+ """
+ Prettyprint().print_heading("Tar package creation", 50)
+
+ tar_file = package_name + '.' + 'tar'
+ # Directory where tar should be stored.
+ # Example /var/mybuild/workspace/sources
+
+ tarfilefullpath = os.path.join(outputdir, tar_file)
+ if os.path.isfile(tarfilefullpath) and not project_changed:
+ self.logger.info("Using cached %s", tarfilefullpath)
+ return tarfilefullpath
+
+ self.logger.info("Creating tar file %s", tarfilefullpath)
+ # sourcedir = /var/mybuild/helloworld/checkout
+ # sourcedir_dirname = /var/mybuild/helloworld
+ # sourcedir_basename = checkout
+ sourcedir_dirname = os.path.dirname(sourcedir)
+
+ os.chdir(sourcedir_dirname)
+
+ tar_params = ["tar", "cf", tarfilefullpath, "--directory="+os.path.dirname(sourcedir)]
+ tar_params = tar_params+["--exclude-vcs"]
+ tar_params = tar_params+["--transform=s/" + os.path.basename(sourcedir) + "/" + os.path.join(package_name) + "/"]
+ tar_params = tar_params+[os.path.basename(sourcedir)]
+ self.logger.debug("Running: %s", " ".join(tar_params))
+ ret = subprocess.call(tar_params)
+ if ret > 0:
+ raise PackagebuildingError("Tar error: %s", ret)
+
+ git_dir = os.path.join(os.path.basename(sourcedir), '.git')
+ if os.path.exists(git_dir):
+ tar_params = ["tar", "rf", tarfilefullpath, "--directory="+os.path.dirname(sourcedir)]
+ tar_params += ["--transform=s/" + os.path.basename(sourcedir) + "/" + os.path.join(package_name) + "/"]
+ tar_params += ['--dereference', git_dir]
+ self.logger.debug("Running: %s", " ".join(tar_params))
+ ret = subprocess.call(tar_params)
+ if ret > 1:
+ self.logger.warning("Git dir tar failed")
+
+ if archive_file_extension == "tar.gz":
+ if PIGZ_INSTALLED:
+ cmd = ['pigz', '-f']
+ else:
+ cmd = ['gzip', '-f']
+ resultfile = tarfilefullpath + '.gz'
+ else:
+ raise PackagebuildingError("Unknown source archive format: %s" % archive_file_extension)
+ cmd += [tarfilefullpath]
+ self.logger.debug("Running: %s", " ".join(cmd))
+ ret = subprocess.call(cmd)
+ if ret > 0:
+ raise PackagebuildingError("Cmd error: %s", ret)
+
+ return resultfile
+
class Mockcommanderror(RpmbuilderError):
    """ Raised internally when the mock command exits non-zero; carries
    the exit code in returncode. """
    def __init__(self, returncode):
        self.returncode = returncode
+
class PackagebuildingError(RpmbuilderError):

    """ Exceptions originating from Builder and main level """
    pass
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" Pretty printer provides pretty printing """
+import logging
+
class Prettyprint(object):
    """ Pretty and common printing for all modules """

    @staticmethod
    def print_heading(string, weight):
        """ Log the title centred in a row of '*' characters that is
        `weight` columns wide. """
        heading = (" " + string + " ").center(weight, "*")
        logging.getLogger(__name__).info(heading)
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Project is a subsystem which contains one spec file which
+defines how it is build. Every project has one git
+repository from where it is cloned from.
+"""
+import glob
+import json
+import logging
+import os
+import shutil
+import subprocess
+
+import re
+
+import datetime
+from rpmbuilder.baseerror import RpmbuilderError
+from rpmbuilder.prettyprinter import Prettyprint
+from rpmbuilder.rpmtools import Repotool, Specworker, RepotoolError, SpecError
+from rpmbuilder.utils import find_files
+from rpmbuilder.version_control import VersionControlSystem, VcsError
+from rpmbuilder.get_sources import get_sources
+
+
class Project(object):

    """ Instance of a single buildable project: one spec file, one checkout,
    built inside one or more mock roots. """

    def __init__(self, name, workspace, projects, builders, packagebuilder, chrootscrub=True, nosrpm=False):
        """ Prepare the per-project workspace directory layout.

        name: project name used for workspace paths and logging.
        workspace: root directory for all build artifacts.
        projects: shared mapping of project name -> Project.
        builders: provides mock roots (.roots) and builder config dir
            (.get_configdir()).
        packagebuilder: object performing the actual mock/rpmbuild calls.
        chrootscrub: whether the chroot is scrubbed after building.
        nosrpm: build rpms directly from source archives, skipping the srpm.
        """
        self.name = name

        self.logger = logging.getLogger(__name__ + "." + self.name)

        self.project_rebuild_needed = False

        self.project_workspace = os.path.join(workspace, 'projects', self.name)

        self.projects = projects
        self.builders = builders
        # Workspace layout for build inputs and products
        self.directory_of_specpatch = os.path.join(self.project_workspace, 'rpmbuild', 'spec')
        self.directory_of_sourcepackage = os.path.join(self.project_workspace, 'rpmbuild', 'sources')
        self.directory_of_srpms = os.path.join(self.project_workspace, 'rpmbuild', 'srpm')
        self.directory_of_rpm = os.path.join(self.project_workspace, 'rpmbuild', 'rpm')
        self.directory_of_commonrepo = os.path.join(workspace, 'buildrepository')

        self.directory_of_builder = self.builders.get_configdir()

        # Spec and srpm directories are emptied up front so stale products
        # from a previous run cannot leak into this build.
        self.__create_directories([self.directory_of_specpatch,
                                   self.directory_of_srpms],
                                  verify_empty=True)
        self.__create_directories([self.directory_of_sourcepackage],
                                  verify_empty=False)

        self.packagebuilder = packagebuilder

        self.chrootscrub = chrootscrub
        # Per-mock-root flag: has this project been built in that root yet
        self.built = {}
        for mockroot in builders.roots:
            self.built[mockroot] = False

        self.project_changed = False
        self.projconf = None
        self.spec = None
        self.useversion = None
        self.directory_of_checkout = None
        self.nosrpm = nosrpm
        self.centos_style = False
        self.buildrequires_downstream = set()
        self.buildrequires_upstream = set()

    def mark_for_rebuild(self):
        """ Marking project for rebuild only if project has not changed """
        if not self.project_changed:
            self.logger.debug("Marking project %s for rebuild.", self.name)
            self.project_rebuild_needed = True

    def mark_downstream_for_rebuild(self, marked_for_build=None):
        """
        Recursively mark downstream projects for rebuilding.
        Return set of projects marked for rebuild
        """
        if marked_for_build is None:
            marked_for_build = set()
        self.logger.debug("Marking downstream for rebuild in \"%s\"",
                          self.name)
        for project in self.who_buildrequires_me():
            self.logger.debug("BuildRequires to \"%s\" found in \"%s\"",
                              self.name, project)
            if project in marked_for_build:
                self.logger.debug("\"%s\" already marked for build", project)
            elif self.projects[project].project_rebuild_needed:
                self.logger.debug("\"%s\" already marked for rebuild", project)
            else:
                self.projects[project].mark_for_rebuild()
                marked_for_build.add(project)
                # Check if downstream has downstream projects
                tmpset = self.projects[project].mark_downstream_for_rebuild(
                    marked_for_build)
                marked_for_build.update(tmpset)
        return marked_for_build

    def build_project(self, mockroot):
        """ Do building of SRPM and RPM files """
        time_start = datetime.datetime.now()
        Prettyprint().print_heading("Build " + self.name, 60)
        assert not self.built[mockroot], "Project already built"

        # Produce spec file: patch version/release in when the spec uses the
        # %{_version} placeholder, otherwise use the spec as-is.
        if self.spec.version == '%{_version}':
            self.logger.debug("patching spec file")
            self.logger.debug("Version in spec is going to be %s", self.useversion)

            rpm = Repotool()
            userelease = rpm.next_release_of_package(
                os.path.join(self.directory_of_commonrepo,
                             self.builders.roots[0],
                             "rpm"),
                self.spec.name,
                self.useversion,
                self.spec.release)
            self.logger.debug("Release in spec is going to be %s", userelease)

            specfile = self.packagebuilder.patch_specfile(self.spec.specfilefullpath,
                                                          self.directory_of_specpatch,
                                                          self.useversion,
                                                          userelease)
        else:
            self.logger.debug("Skipping spec patching")
            specfile = self.spec.specfilefullpath

        # Start mocking
        self.logger.debug("Starting building in root \"%s\"", mockroot)
        if self.centos_style:
            # CentOS-style: build directly from the checkout tree plus
            # externally fetched sources.
            shutil.rmtree(self.directory_of_sourcepackage)
            ignore_git = shutil.ignore_patterns('.git')
            shutil.copytree(self.directory_of_checkout, self.directory_of_sourcepackage, ignore=ignore_git)
            sources_key = 'CENTOS_SOURCES'
            if sources_key not in os.environ:
                raise RpmbuilderError('Cannot build CentOS style RPM, %s not defined in the environment' % sources_key)
            get_sources(self.directory_of_sourcepackage, os.environ[sources_key].split(','), self.logger)
            self.create_rpm_from_filesystem(self.directory_of_sourcepackage, mockroot)
        elif self.nosrpm:
            list_of_source_packages = self.get_source_package()
            self.create_rpm_from_archive(list_of_source_packages, mockroot)
        else:
            self.get_source_package()
            # Create source RPM file
            sourcerpm = self.get_source_rpm(self.directory_of_sourcepackage, specfile, mockroot)

            # Create final RPM file(s)
            self.create_rpm_from_srpm(sourcerpm, mockroot)

        # Mark build completed
        self.built[mockroot] = True
        time_delta = datetime.datetime.now() - time_start
        self.logger.info('Building success: %s (took %s [%s sec])', self.name, time_delta, time_delta.seconds)

        # We wipe buildroot of previously built rpm, source etc. packages
        # This is custom cleaning which does not remove chroot
        self.packagebuilder.mock_wipe_buildroot(self.project_workspace, self.directory_of_builder, mockroot)

    def pull_source_packages(self, target_dir):
        """ Download remote source packages listed in the spec via spectool.

        Raises RepotoolError when spectool cannot be run or fails.
        """
        cmd = ['/usr/bin/spectool', '-d', 'KVERSION a.b', '-g', '--directory', target_dir, self.spec.specfilefullpath]
        self.logger.info('Pulling source packages: %s', cmd)
        try:
            subprocess.check_call(cmd, shell=False)
            self.logger.info('Pulling source packages ok')
        except OSError as err:
            self.logger.info('Pulling source packages nok %s', err.strerror)
            raise RepotoolError("Calling of command spectool caused: \"%s\"" % err.strerror)
        except Exception as err:
            # BUGFIX: this was a bare "except" that logged an unbound "err"
            # (NameError). Catch Exception so KeyboardInterrupt/SystemExit
            # still propagate, and log the actual error.
            self.logger.info('Pulling source packages nok: %s', err)
            raise RepotoolError("There was error pulling source content")

    def get_source_package(self):
        """ Collect all Source and Patch files into the sources directory.

        Remote sources are downloaded, local ones copied; when a source is
        not found on disk a source archive is created from the checkout.
        Returns the list of produced source package paths.
        """
        source_package_list = []
        for source_file_hit in self.spec.source_files:
            self.logger.info("Acquiring source file \"%s\"", source_file_hit)
            if re.match(r'^(http[s]*|ftp)://', source_file_hit):
                self.logger.info("PULL %s", self.directory_of_sourcepackage)
                self.pull_source_packages(self.directory_of_sourcepackage)
                source_package_list.append(self.directory_of_sourcepackage + '/' + source_file_hit.split('/')[-1])
                continue
            for subdir in ["", "SOURCES"]:
                if os.path.isfile(os.path.join(self.directory_of_checkout, subdir, source_file_hit)):
                    shutil.copy(os.path.join(self.directory_of_checkout, subdir, source_file_hit), self.directory_of_sourcepackage)
                    source_package_list.append(os.path.join(self.directory_of_sourcepackage, source_file_hit))
                    break
            else:
                # Source not in checkout: archive the whole checkout tree.
                tarname = self.spec.name + '-' + self.useversion
                source_package_list.append(self.packagebuilder.create_source_archive(tarname,
                                                                                    self.directory_of_checkout,
                                                                                    self.directory_of_sourcepackage,
                                                                                    self.project_changed,
                                                                                    self.spec.source_file_extension))

        for patch_file_hit in self.spec.patch_files:
            self.logger.info("Copying %s to directory %s", patch_file_hit, self.directory_of_sourcepackage)
            for subdir in ["", "SOURCES"]:
                if os.path.isfile(os.path.join(self.directory_of_checkout, subdir, patch_file_hit)):
                    shutil.copy(os.path.join(self.directory_of_checkout, subdir, patch_file_hit), self.directory_of_sourcepackage)
                    break
            else:
                raise ProjectError("Spec file lists patch \"%s\" but no file found" % patch_file_hit)
        return source_package_list

    def get_source_rpm(self, hostsourcedir, specfile, mockroot):
        """ Build the source rpm inside the given mock root. """
        return self.packagebuilder.mock_source_rpm(hostsourcedir,
                                                   specfile,
                                                   self.directory_of_srpms,
                                                   self.directory_of_builder,
                                                   mockroot)

    def create_rpm_from_srpm(self, sourcerpm, mockroot):
        """ Build binary rpm(s) from the given source rpm. """
        directory_of_rpm = os.path.join(self.directory_of_rpm, mockroot)
        self.packagebuilder.mock_rpm(sourcerpm,
                                     directory_of_rpm,
                                     self.directory_of_builder,
                                     mockroot)
        # Delete duplicated src.rpm which is returned by rpm creation
        os.remove(os.path.join(directory_of_rpm, os.path.basename(sourcerpm)))

    def create_rpm_from_archive(self, source_tar_packages, mockroot):
        """ Build binary rpm(s) directly from source archives (nosrpm mode). """
        directory_of_rpm = os.path.join(self.directory_of_rpm, mockroot)
        self.packagebuilder.mock_rpm_from_archive(source_tar_packages, directory_of_rpm, self.directory_of_builder, mockroot)

    def create_rpm_from_filesystem(self, path, mockroot):
        """ Build binary rpm(s) from a filesystem tree (CentOS-style mode). """
        directory_of_rpm = os.path.join(self.directory_of_rpm, mockroot)
        self.packagebuilder.mock_rpm_from_filesystem(path,
                                                     self.spec.specfilename,
                                                     directory_of_rpm,
                                                     self.directory_of_builder,
                                                     mockroot,
                                                     self.directory_of_srpms)

    def list_buildproducts_for_mockroot(self, mockroot):
        """ List both source and final rpm packages """
        srpmlist = []
        rpmlist = []
        for occurence in os.listdir(os.path.join(self.directory_of_rpm, mockroot)):
            if occurence.endswith(".rpm"):
                rpmlist.append(occurence)
        for occurence in os.listdir(self.directory_of_srpms):
            if occurence.endswith(".src.rpm"):
                srpmlist.append(occurence)
        return rpmlist, srpmlist

    def resolve_dependencies(self, mockroot):
        """ Install the spec's build dependencies into the mock root. """
        return self.packagebuilder.run_builddep(self.spec.specfilefullpath,
                                                self.directory_of_srpms,
                                                self.directory_of_builder,
                                                mockroot)

    def store_build_products(self, commonrepo):
        """ Save build products under common yum repository """
        self.__create_directories([commonrepo])
        for mockroot in self.builders.roots:
            srpmtargetdir = os.path.join(commonrepo, mockroot, 'srpm')
            rpmtargetdir = os.path.join(commonrepo, mockroot, 'rpm')
            self.__create_directories([srpmtargetdir, rpmtargetdir])
            (rpmlist, srpmlist) = self.list_buildproducts_for_mockroot(mockroot)
            build_product_dir = os.path.join(self.directory_of_rpm, mockroot)
            self.logger.debug("Hard linking %s rpm packages to %s", self.name, rpmtargetdir)
            for rpm_file in rpmlist:
                self.logger.info("Hard linking %s", rpm_file)
                try:
                    os.link(os.path.join(build_product_dir, rpm_file),
                            os.path.join(rpmtargetdir, os.path.basename(rpm_file)))
                except OSError:
                    # Link may already exist from an earlier run; best effort.
                    pass
            self.logger.debug("Hard linking %s srpm packages to %s", self.name, srpmtargetdir)
            for srpm_file in srpmlist:
                self.logger.info("Hard linking %s", srpm_file)
                try:
                    os.link(os.path.join(self.directory_of_srpms, srpm_file),
                            os.path.join(srpmtargetdir, srpm_file))
                except OSError:
                    # Link may already exist from an earlier run; best effort.
                    pass

        # Store info of latest build
        self.store_project_status()

    def who_buildrequires_me(self):
        """
        Return a list of projects which directly buildrequires this project (non-recursive)
        """
        downstream_projects = set()
        # Loop through my packages
        for package in self.spec.packages:
            # Loop other projects and check if they need me
            # To need me, they have my package in buildrequires
            for project in self.projects:
                if package in self.projects[project].spec.buildrequires:
                    self.logger.debug("Found dependency in {}: my package {} is required by project {}".format(self.name, package, project))
                    self.projects[project].buildrequires_upstream.add(self.name)
                    # NOTE(review): assumes this project is present in
                    # self.projects under its own name — confirm with callers.
                    self.projects[self.name].buildrequires_downstream.add(project)
                    downstream_projects.add(project)
        return downstream_projects

    def who_requires_me(self, recursive=False, depth=0):
        """
        Return a list of projects which have requirement to this project
        """
        if depth > 10:
            # logger.warn is deprecated; use warning()
            self.logger.warning("Hit infinite recursion limiter in {}".format(self.name))
            recursive = False
        # Loop through my packages
        downstream_projects = set()
        for package in self.spec.packages:
            # Loop other projects and check if they need me
            # To need me, they have my package in buildrequires or requires
            for project in self.projects:
                if package in self.projects[project].spec.buildrequires \
                        or package in self.projects[project].spec.requires:
                    downstream_projects.add(project)
                    if recursive:
                        downstream_projects.update(
                            self.projects[project].who_requires_me(True, depth + 1))
        self.logger.debug("Returning who_requires_me for %s: %s",
                          self.name, ', '.join(downstream_projects))
        return downstream_projects

    def get_project_changed(self):
        """ Subclasses decide how project change detection works. """
        raise NotImplementedError

    def store_project_status(self):
        """ Subclasses decide how project status is persisted. """
        raise NotImplementedError

    def __create_directories(self, directories, verify_empty=False):
        """ Create the given directories; optionally empty existing ones. """
        for directory in directories:
            if os.path.isdir(directory):
                if verify_empty and os.listdir(directory) != []:
                    self.logger.debug("Cleaning directory %s", directory)
                    globstring = directory + "/*"
                    files = glob.glob(globstring)
                    for foundfile in files:
                        self.logger.debug("Removing file %s", foundfile)
                        os.remove(foundfile)
            else:
                self.logger.debug("Creating directory %s", directory)
                os.makedirs(directory)
        return True
+
class LocalMountProject(Project):
    """ Projects coming from a local disk mount instead of a git clone. """
    def __init__(self, name, directory, workspace, projects, builders, packagebuilder, masterargs, spec_path):
        """ Initialize a project from an existing directory on disk.

        Raises ProjectError if *directory* does not exist.
        """
        chrootscrub = masterargs.scrub
        nosrpm = masterargs.nosrpm
        forcebuild = masterargs.forcerebuild

        Prettyprint().print_heading("Initializing %s from disk" % name, 60)
        super(LocalMountProject, self).__init__(name, workspace, projects, builders, packagebuilder)

        if not os.path.isdir(directory):
            # BUGFIX: the message was passed with a separate argument
            # ("...", directory) which is never interpolated; format it here
            # like the other ProjectError call sites do.
            raise ProjectError("No directory %s found" % directory)

        self.vcs = VersionControlSystem(directory)
        self.directory_of_checkout = directory

        # Values from build configuration file
        self.projconf = {}
        # CentOS-style checkouts carry *.metadata files plus SOURCES/ and
        # SPECS/ directories.
        if len(list(find_files(directory, r'\..+\.metadata$'))) > 0 and \
                os.path.isdir(os.path.join(directory, 'SOURCES')) and \
                os.path.isdir(os.path.join(directory, 'SPECS')):
            self.centos_style = True
            self.logger.debug('CentOS style RPM detected')
        self.spec = Specworker(os.path.dirname(spec_path), os.path.basename(spec_path))

        self.gitversioned = False
        try:
            citag = self.vcs.get_citag()
            self.gitversioned = True
        except VcsError:
            # Not a git clone; a static fallback version is used below.
            # (The previous "except: raise" clause was a no-op and removed.)
            self.logger.debug("Project does not come from Git")

        if self.spec.version == '%{_version}':
            if self.gitversioned:
                self.logger.debug("Using Git describe for package version")
                self.useversion = citag
            else:
                self.logger.debug("Project not from Git. Using a.b package version")
                self.useversion = 'a.b'
        else:
            self.logger.debug("Using spec definition for package version")
            self.useversion = self.spec.version

        self.packageversion = self.useversion
        self.project_changed = self.get_project_changed()
        self.nosrpm = nosrpm

        if forcebuild:
            self.mark_for_rebuild()

        self.chrootscrub = chrootscrub

    def get_project_changed(self):
        """
        Project status is read from status.txt file. Dirty git clones always require rebuild.
        """
        statusfile = os.path.join(self.project_workspace, 'status.txt')

        if os.path.isfile(statusfile):
            with open(statusfile, 'r') as filep:
                previousprojectstatus = json.load(filep)
            # Compare old values against new values
            if not self.gitversioned:
                self.logger.warning("Project %s is not git versioned. Forcing rebuild.", self.name)
                return True
            elif self.vcs.is_dirty():
                self.logger.warning("Project %s contains unversioned changes and is \"dirty\". Forcing rebuild.", self.name)
                return True
            elif previousprojectstatus['sha'] != self.vcs.commitsha:
                self.logger.info("Project %s log has new hash. Rebuild needed.", self.name)
                return True
            else:
                self.logger.info("Project %s has NO new changes.", self.name)
                return False
        else:
            # No configuration means that project has not been compiled
            self.logger.warning("No previous build found for %s. Building initial version.", self.name)
            return True

    def store_project_status(self):
        """ Write information of project version to status.txt
        This can only be done for git versioned projects """
        if self.gitversioned:
            # Save information of the last compilation
            statusfile = os.path.join(self.project_workspace, 'status.txt')
            self.logger.debug("Updating status file %s", statusfile)

            projectstatus = {"packageversion": self.packageversion,
                             "sha": self.vcs.commitsha,
                             "project": self.name}

            with open(statusfile, 'w') as outfile:
                json.dump(projectstatus, outfile)
+
class GitProject(Project):
    """ Projects cloned from Git version control system """
    def __init__(self, name, workspace, conf, projects, builders, packagebuilder, masterargs):
        """ Clone/update the project's git repository and read its spec.

        conf: build configuration providing url/ref/spec settings per project.
        masterargs: command-line options (forcerebuild, scrub).
        """
        forcebuild = masterargs.forcerebuild
        chrootscrub = masterargs.scrub

        Prettyprint().print_heading("Initializing %s from Git" % name, 60)
        super(GitProject, self).__init__(name, workspace, projects, builders, packagebuilder)

        # Values from build configuration file
        self.projconf = {'url': conf.get_string(name, "url", mandatory=True),
                         'ref': conf.get_string(name, "ref", mandatory=True),
                         'spec': conf.get_string(name, "spec", mandatory=False, defaultvalue=None)}

        # Do version control updates
        self.directory_of_checkout = os.path.join(self.project_workspace,
                                                  'checkout')
        self.vcs = VersionControlSystem(self.directory_of_checkout)
        self.vcs.update_git_project(self.projconf["url"], self.projconf["ref"])
        self.useversion = self.vcs.get_citag()

        # Read spec
        try:
            self.spec = Specworker(self.directory_of_checkout,
                                   self.projconf["spec"])
        except SpecError:
            # Fall back to CentOS-style layout: spec lives in a SPEC directory.
            self.spec = Specworker(os.path.join(self.directory_of_checkout, "SPEC"), None)
            self.centos_style = True

        # Define what version shall be used in spec file
        if self.spec.version == '%{_version}':
            self.packageversion = self.vcs.get_citag()
            self.logger.debug("Taking package version from VCS")
        else:
            self.packageversion = self.spec.version
            self.logger.debug("Taking package version from spec")
        self.logger.debug("Package version: %s", self.packageversion)

        self.project_changed = self.get_project_changed()
        if forcebuild:
            self.mark_for_rebuild()

        self.chrootscrub = chrootscrub

    def get_project_changed(self):
        """
        Check if there has been changes in the project
        if project has not been compiled -> return = True
        if project has GIT/VCS changes -> return = True
        if project has not changed -> return = False
        """
        statusfile = os.path.join(self.project_workspace, 'status.txt')

        if os.path.isfile(statusfile):
            with open(statusfile, 'r') as filep:
                previousprojectstatus = json.load(filep)
            # Compare old values against new values
            if previousprojectstatus['url'] != self.projconf["url"] \
                    or previousprojectstatus['ref'] != self.projconf["ref"] \
                    or previousprojectstatus['sha'] != self.vcs.commitsha:
                self.logger.debug("Returning info that changes found")
                return True
            else:
                self.logger.debug("Returning info of NO changes")
                return False
        else:
            # No configuration means that project has not been compiled
            self.logger.debug("Doing first build of this project")
            return True

    def store_project_status(self):
        """ Save information of the last compilation """
        statusfile = os.path.join(self.project_workspace, 'status.txt')
        self.logger.debug("Updating status file %s", statusfile)

        # These values are what get_project_changed() compares on the next run.
        projectstatus = {"url": self.projconf["url"],
                         "ref": self.projconf["ref"],
                         "spec": self.projconf["spec"],
                         "packageversion": self.packageversion,
                         "sha": self.vcs.commitsha,
                         "project": self.name}

        with open(statusfile, 'w') as outfile:
            json.dump(projectstatus, outfile)
+
class ProjectError(RpmbuilderError):

    """ Exceptions originating from Project handling. """
    pass
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""For handling rpm related work"""
+import logging
+import os
+import re
+import subprocess
+from rpmUtils.miscutils import splitFilename
+
+from rpmbuilder.baseerror import RpmbuilderError
+from rpmbuilder.executor import Executor
+
+
class Specworker(object):
    """ Parse and validate a single rpm spec file. """

    def __init__(self, directory, specfile=None):
        """ Locate (or verify) the spec file in *directory* and parse it.

        directory: directory holding the spec file.
        specfile: explicit spec file name; when None the directory is
            searched for exactly one ``*.spec`` file.
        Raises SpecError when the spec is missing, ambiguous or invalid.
        """
        self.logger = logging.getLogger(__name__)
        if specfile:
            if self.__verify_specfile_exists(os.path.join(directory, specfile)):
                self.specfilename = specfile
            else:
                self.logger.critical("Specfile %s not found", specfile)
                raise SpecError("Spec file not found")
        else:
            self.specfilename = self.__locate_spec_file(directory)

        self.specfilefullpath = os.path.join(directory, self.specfilename)

        self.name = ""
        self.version = ""
        self.release = ""
        self.source_files = []
        self.source_file_extension = None
        self.patch_files = []
        self.buildrequires = []
        self.requires = []
        self.packages = []
        self.files = []
        # %global macro definitions, exposed as attributes via __getattr__
        self.spec_globals = {}
        self.read_spec()

    def __str__(self):
        return 'name:%s version:%s' % (self.name, self.version)

    def __getattr__(self, item):
        # Fall back to %global macros; unknown attributes resolve to None.
        return self.spec_globals.get(item)

    @staticmethod
    def __locate_spec_file(directory):
        """ Return the name of the single .spec file found in *directory*. """
        logger = logging.getLogger(__name__)
        logger.debug("Searching for spec files under: %s", directory)
        specfile = ''

        for occurence in os.listdir(directory):
            filefullpath = os.path.join(directory, occurence)
            if os.path.isfile(filefullpath) and filefullpath.endswith(".spec"):
                logger.info("Found spec file: %s", occurence)
                if specfile:
                    # BUGFIX: the concatenated message previously lacked a
                    # separating space ("...files.I don't know...").
                    logger.critical("Project has more than one spec file. "
                                    "I don't know which one to use.")
                    raise SpecError("Multiple spec files")
                specfile = occurence
        if specfile:
            return specfile
        raise SpecError("No spec file available")

    def _read_spec_sources(self):
        """ Return SourceN entries; record the archive type of the first. """
        cmd = ['spectool', '-n', '-S', self.specfilefullpath]
        sources = self._parse_spectool_output(Executor().run(cmd))
        # Guard against a spec with no Source entries (previously IndexError);
        # verify_spec_ok() reports the missing archive type afterwards.
        if sources:
            self.source_file_extension = self.__get_source_file_extension(sources[0])
        return sources

    def _read_spec_patches(self):
        """ Return PatchN entries listed in the spec. """
        cmd = ['spectool', '-n', '-P', self.specfilefullpath]
        return self._parse_spectool_output(Executor().run(cmd))

    def _parse_spectool_output(self, output):
        """ Strip the "SourceN:"/"PatchN:" prefix from each spectool line. """
        return [line.split(':', 1)[1].strip() for line in output.splitlines()]

    def _get_package_names(self):
        """ Return the names of all (sub)packages produced by the spec. """
        cmd = ['rpm', '-q', '--qf', '%{NAME}\n', '--specfile', self.specfilefullpath]
        return Executor().run(cmd).splitlines()

    def _get_version(self):
        """ Return the expanded Version of the main package. """
        cmd = ['rpmspec', '-q', '--queryformat', '%{VERSION}\n', self.specfilefullpath]
        return Executor().run(cmd).splitlines()[0]

    def read_spec(self):
        """ Reading spec file values to variables """
        self.logger.debug("Reading spec file %s", self.specfilefullpath)
        self.source_files = self._read_spec_sources()
        self.patch_files = self._read_spec_patches()
        self.packages = self._get_package_names()
        self.name = self.packages[0]
        self.version = self._get_version()

        with open(self.specfilefullpath, 'r') as filep:
            name_found = False
            for line in filep:
                linestripped = line.strip()

                # Skip comments and blank lines
                if linestripped.startswith("#") or not linestripped:
                    continue

                if linestripped.lower().startswith("%global"):
                    try:
                        var, val = re.match(r'^%global (\w+) (.+)$', linestripped).groups()
                        self.spec_globals[var] = val
                    except Exception as err:
                        # Malformed %global lines are tolerated but logged.
                        self.logger.warning(
                            'Failed to parse %%global macro "%s" (error: %s)',
                            linestripped, err)

                elif linestripped.lower().startswith("buildrequires:"):
                    self.buildrequires.extend(self.__get_value_from_line(linestripped))

                elif linestripped.lower().startswith("requires:"):
                    self.requires.extend(self.__get_value_from_line(linestripped))

                elif linestripped.lower().startswith("release:"):
                    templist = self.__get_value_from_line(linestripped)
                    if templist:
                        self.release = templist[0]

                elif linestripped.lower().startswith("name:"):
                    name_found = True

                elif linestripped.lower().startswith("%package"):
                    if not name_found:
                        self.logger.error(
                            "SPEC file is faulty. Name of the package should be defined before defining subpackages")
                        raise SpecError(
                            "Problem in spec file. Subpackages defined before %packages")

                elif linestripped.lower().startswith("%files"):
                    if name_found:
                        templist = self.__get_package_names_from_line(self.name, linestripped)
                        self.files.extend(templist)
                    else:
                        self.logger.critical(
                            "SPEC file is faulty. Name of the package should be defined before defining subpackages")
                        raise SpecError("Problem in spec file. No %files defined")

        if not self.verify_spec_ok():
            raise SpecError("Inspect file %s" % self.specfilefullpath)
        self.logger.info("Reading spec file done: %s", str(self))

    def verify_spec_ok(self):
        """ Check that spec file contains the necessary building blocks """
        spec_status = True
        if not self.name:
            self.logger.critical("Spec does not have name defined")
            spec_status = False
        if not self.version:
            self.logger.critical("Spec does not contain version")
            spec_status = False
        if not self.release:
            self.logger.critical("Spec does not contain release")
            spec_status = False
        if not self.source_file_extension:
            self.logger.critical(
                "Spec does not define source information with understandable archive method")
            spec_status = False
        return spec_status

    @staticmethod
    def __get_source_file_extension(line):
        """ Return the archive extension of a source file name/URL. """
        if line.endswith('.tar.gz'):
            return "tar.gz"
        elif line.endswith('.tgz'):
            return "tgz"
        elif line.endswith('.tar'):
            return "tar"
        elif line.endswith('.tar.bz2'):
            return "tar.bz2"
        elif line.endswith('.tar.xz'):
            return "tar.xz"
        elif line.endswith('.zip'):
            return "zip"
        else:
            raise SpecError(
                "Unknown source archive format. Supported are: tar.gz, tgz, tar, tar.bz2, tar.xz, zip")

    @staticmethod
    def __get_value_from_line(line):
        """ Return the comma/whitespace separated values after the colon.

        BUGFIX: the previous replace/split combination produced empty
        entries for inputs like "Requires: a, b"; split on any run of
        commas/whitespace and drop empties instead.
        """
        linewithgroups = re.search('(.*):(.*)$', line)
        linevalues = re.split(r'[,\s]+', linewithgroups.group(2).strip())
        return [linevalue for linevalue in linevalues if linevalue]

    @staticmethod
    def __get_package_names_from_line(name, line):
        """ Return subpackage names for a %files/%package line. """
        linewithgroups = re.search('%(.*) (.*)$', line)
        if linewithgroups:
            value = linewithgroups.group(2).strip(' \t\n\r')
            return [name + '-' + value]
        return [name]

    def __verify_specfile_exists(self, specfile):
        """ Check that the given spec file exists """
        if not specfile.endswith(".spec"):
            # BUGFIX: message said "prefix" for a file-name suffix.
            self.logger.error("Given specfile %s does not end with .spec suffix", specfile)
            return False

        if os.path.isfile(specfile):
            return True
        self.logger.error("Could not locate specfile %s", specfile)
        return False
+
+
class Repotool(object):
    """ Module for handling rpm repository related functions """

    def __init__(self):
        self.logger = logging.getLogger(__name__)

    def createrepo(self, directory):
        """ Create or update a yum repository of the given directory.

        Command output is written to log.txt inside the directory.
        Raises RepotoolError when createrepo fails or is unavailable.
        """
        createrepo_executable = "/usr/bin/createrepo"
        createrepocommand = [createrepo_executable, '--update', directory]
        outputfile = os.path.join(directory, 'log.txt')
        with open(outputfile, 'w') as filep:
            try:
                subprocess.check_call(createrepocommand, shell=False, stdout=filep,
                                      stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                self.logger.critical("There was error running createrepo")
                raise RepotoolError("There was error running createrepo")
            except OSError:
                self.logger.error("%s command not available", createrepo_executable)
                raise RepotoolError("No createrepo tool available")

    @staticmethod
    def _release_number(release):
        """ Return the leading integer of a release string, or 0 if none. """
        match = re.match(r'^([0-9]+)', str(release))
        return int(match.group(1)) if match else 0

    def latest_release_of_package(self, directory, package, version):
        """ Return latest release of the given package as a string.

        BUGFIX: the release was previously compared against the integer 0
        as a raw string, which is a TypeError on Python 3 and a wrong
        lexicographic comparison ("10" < "9") on Python 2. Compare the
        numeric prefix of the release instead.
        """
        self.logger.debug("Looking for latest %s - %s under %s",
                          package, version, directory)
        latest_found_release = 0
        if os.path.isdir(directory):
            for occurence in os.listdir(directory):
                filefullpath = os.path.join(directory, occurence)
                if os.path.isfile(filefullpath) \
                        and filefullpath.endswith(".rpm") \
                        and not filefullpath.endswith(".src.rpm"):
                    (rpmname, rpmversion, rpmrelease, _, _) = splitFilename(occurence)
                    if rpmname == package and rpmversion == version:
                        self.logger.debug("Found rpm %s", filefullpath)
                        release_number = self._release_number(rpmrelease)
                        if latest_found_release < release_number:
                            self.logger.debug("Found rpm to match and to be the latest")
                            latest_found_release = release_number
        if latest_found_release == 0:
            self.logger.debug("Did not find any previous releases of %s", package)
        return str(latest_found_release)

    def next_release_of_package(self, directory, package, version, oldrelease):
        """ Return next release of the given package.

        The numeric part of the latest published release is incremented and
        any non-numeric suffix of the spec's release (e.g. ".el7") is kept.
        """
        self.logger.debug("Looking for next release number for %s - %s under %s ", package, version,
                          directory)

        # Preserve a non-numeric suffix from the spec release, e.g. "1.el7" -> ".el7"
        specreleasematch = re.search('^([0-9]+)(.*)$', oldrelease)
        if specreleasematch and specreleasematch.group(2):
            releasesuffix = specreleasematch.group(2)
        else:
            releasesuffix = ''

        latest_release = self.latest_release_of_package(directory, package, version)
        self.logger.debug("Latest release of the package: %s", latest_release)
        rematches = re.search('^([0-9]+)(.*)$', latest_release)
        if rematches and rematches.group(1).isdigit():
            nextrelease = str(int(rematches.group(1)) + 1) + releasesuffix
            self.logger.debug("Next release of the package: %s", nextrelease)
            return nextrelease
        self.logger.critical("Could not parse release \"%s\" from package \"%s\"",
                             latest_release, package)
        raise RepotoolError("Could not process release in rpm")
+
+
class RepotoolError(RpmbuilderError):
    """ Exceptions originating from repotool operations (createrepo, release lookup). """
    pass
+
+
class SpecError(RpmbuilderError):
    """ Exceptions originating from spec file content or lookup. """
    pass
--- /dev/null
+#! /usr/bin/python -tt
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import re
+
+
def find_files(path, pattern=None):
    """ Recursively yield full paths of directories and files under *path*.

    When *pattern* is given, only entries whose base name matches the
    regular expression (via re.search) are yielded.
    """
    for root, dirs, filenames in os.walk(path):
        for entry in dirs + filenames:
            if pattern is None or re.search(pattern, entry):
                yield os.path.join(root, entry)
--- /dev/null
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Module for handling git repository clones
+"""
+import logging
+import os
+import re
+import subprocess
+from time import strftime, localtime
+
+from rpmbuilder.baseerror import RpmbuilderError
+
+
+class VersionControlSystem(object):
+ """ Handling of project's repositories """
+
    def __init__(self, clone_target_dir):
        """ Bind this handler to a clone directory and probe its HEAD state.

        clone_target_dir: directory where the clone lives or will be created.
        """
        self.logger = logging.getLogger(__name__)
        self.clone_target_dir = clone_target_dir
        # Metadata of the current HEAD; filled in by __store_head_state().
        self.citag = None
        self.commitsha = None
        self.commitauth = None
        self.commitepocdate = None
        self.commitmessage = None
        self.describe = None
        try:
            self.__store_head_state()
        except VcsError:
            # Best effort: the directory may not (yet) be a git clone.
            pass
+
+ def update_git_project(self, url, usergivenref):
+ """ Update of a single repository based on given reference """
+ self.logger.info("%-18s: %s", "Git cloning from", url)
+ self.logger.info("%-18s: %s", "Git cloning to", self.clone_target_dir)
+ self.logger.info("%-18s: %s", "Git reference", usergivenref)
+
+ # Check if we already have local clone of the repository
+ self.__clone_repo(url)
+
+ # Change to user given ref value.
+ self.__update_head(url, usergivenref)
+
+ self.__store_head_state()
+ self.citag = self.get_citag()
+
+ def __clone_repo(self, url):
+ """ Create a clone from URL. If already exists, update it """
+ if not os.path.isdir(self.clone_target_dir):
+ self.logger.debug("Creating a fresh clone")
+ cmd = ['git', 'clone', url, self.clone_target_dir]
+ self.logger.debug(self.__run_git(cmd))
+ else:
+ self.logger.debug("We already have a clone. Using old clone.")
+ # Remove any possible garbage from clone directory
+ self.logger.debug("Running cleaning of existing repository")
+ cmd = ['git', 'reset', '--hard']
+ self.logger.debug(self.__run_git(cmd, self.clone_target_dir))
+ # Verify that correct remote is being used
+ self.__set_remoteurl(url)
+ # Run fetch twice. From Git 1.9 onwards this is not necessary,
+ # but to make sure of all server compatibility we do it twice
+ self.logger.debug("Fetching latest from remote")
+ cmd = ['git', 'fetch', 'origin']
+ self.logger.debug(self.__run_git(cmd, self.clone_target_dir))
+ cmd = ['git', 'fetch', 'origin', '--tags']
+ self.logger.debug(self.__run_git(cmd, self.clone_target_dir))
+
+ def __update_head(self, url, usergivenref):
+ """ Change head to point to given ref. Ref can also be tag/commit """
+ self.logger.debug("Reseting git head to %s", usergivenref)
+ try:
+ self.logger.debug("Checking out %s as reference", usergivenref)
+ cmd = ['git', 'checkout', '--force', '--detach', 'origin/' + usergivenref]
+ self.logger.debug(self.__run_git(cmd, self.clone_target_dir))
+ except:
+ self.logger.debug("Unable to checkout %s as reference", usergivenref)
+ try:
+ self.logger.debug("Checking out %s as tag/commit", usergivenref)
+ cmd = ['git', 'checkout', '--force', '--detach', usergivenref]
+ self.logger.debug(self.__run_git(cmd, self.clone_target_dir))
+ except GitError:
+ raise VcsError(
+ "Could not checkout branch/ref/commit \"%s\" from %s." % (usergivenref, url))
+
+ def __run_git(self, gitcmd, gitcwd=None):
+ """ Run given git command """
+ assert gitcmd
+ self.logger.debug("Running \'%s\' under directory %s", " ".join(gitcmd), gitcwd)
+ try:
+ return subprocess.check_output(gitcmd,
+ shell=False,
+ cwd=gitcwd)
+ except subprocess.CalledProcessError as err:
+ raise GitError("Could not execute %s command. Return code was %d" % (err.cmd,
+ err.returncode))
+ except:
+ raise
+
+ def __set_remoteurl(self, url):
+ """
+ Verify that repository is using the correct remote URL. If not
+ then it should be changed to the desired one.
+ """
+ self.logger.info("Verifying we have correct remote repository configured")
+ cmd = ["git", "config", "--get", "remote.origin.url"]
+ existing_clone_url = self.__run_git(cmd, self.clone_target_dir).strip()
+ if existing_clone_url != url:
+ self.logger.info("Existing repo has url: %s", existing_clone_url)
+ self.logger.info("Changing repo url to: %s", url)
+ cmd = ["git", "remote", "set-url", "origin", url]
+ self.logger.debug(self.__run_git(cmd, self.clone_target_dir))
+
+ def __store_head_state(self):
+ """ Read checkout values to be used elsewhere """
+ self.logger.info("State of the checkout:")
+
+ try:
+ cmd = ["git", "log", "-1", "--pretty=%H"]
+ self.commitsha = self.__run_git(cmd, self.clone_target_dir).strip()
+ self.logger.info(" %-10s: %s", "SHA", self.commitsha)
+
+ cmd = ["git", "log", "-1", "--pretty=%ae"]
+ self.commitauth = self.__run_git(cmd, self.clone_target_dir).strip()
+ self.logger.info(" %-10s: %s", "Author", self.commitauth)
+
+ cmd = ["git", "log", "-1", "--pretty=%ct"]
+ self.commitepocdate = float(self.__run_git(cmd, self.clone_target_dir).strip())
+ self.logger.info(" %-10s: %s", "Date:",
+ strftime("%a, %d %b %Y %H:%M:%S",
+ localtime(self.commitepocdate)))
+
+ cmd = ["git", "log", "-1", "--pretty=%B"]
+ self.commitmessage = self.__run_git(cmd, self.clone_target_dir).strip()
+ self.logger.info(" %-10s: %s", "Message:", self.commitmessage.split('\n', 1)[0])
+ except GitError:
+ raise VcsError("Directory \"%s\" does not come from vcs" % self.clone_target_dir)
+
+ def is_dirty(self):
+ """ Check the status of directory. Return true if version control is dirty.
+ Git clone is dirty if status shows anything """
+ cmd = ["git", "status", "--porcelain"]
+ return len(self.__run_git(cmd, self.clone_target_dir).strip()) > 0
+
+ def get_citag(self):
+ """ This is for creating the tag for the rpm. """
+
+ if self.citag:
+ return self.citag
+
+ setup_py = os.path.join(self.clone_target_dir, 'setup.py')
+ if os.path.exists(setup_py):
+ with open(setup_py, 'r') as fpoint:
+ if re.search(r'^.*setup_requires=.*pbr.*$', fpoint.read(), re.MULTILINE):
+ cmd = ['python', 'setup.py', '--version']
+ citag = self.__run_git(cmd, self.clone_target_dir).strip()
+ if ' ' in citag or '\n' in citag:
+ # 1st execution output may contains extra stuff such as locally installed eggs
+ citag = self.__run_git(cmd, self.clone_target_dir).strip()
+ return citag
+
+ try:
+ cmd = ["git", "describe", "--dirty", "--tags"]
+ describe = self.__run_git(cmd, self.clone_target_dir).strip()
+ self.logger.debug("Git describe from tags: %s", describe)
+ if re.search("-", describe):
+ # if describe format is 2.3-3-g4324323, we need to modify it
+ dmatch = re.match('^(.*)-([0-9]+)-(g[a-f0-9]{7,}).*$', describe)
+ if dmatch:
+ citag = describe.replace('-', '-c', 1)
+ else:
+ raise Exception('no match, falling back to non-tagged describe')
+ else:
+ # if describe format is 2.3
+ citag = describe
+ except:
+ try:
+ count = self.__run_git(["git", "rev-list", "HEAD", "--count"],
+ self.clone_target_dir).strip()
+ sha = self.__run_git(["git", "describe", "--long", "--always"],
+ self.clone_target_dir).strip()
+ citag = 'c{}.g{}'.format(count, sha)
+ except:
+ raise VcsError("Could not create a name for the package with git describe")
+ # Replace all remaining '-' characters with '.' from version number
+ if re.search("-", citag):
+ citag = re.sub('-', '.', citag)
+ return citag
+
+
class VcsError(RpmbuilderError):
    """ Raised for all version control error situations. """
+
+
class GitError(RpmbuilderError):
    """ Raised when a git command fails. """
--- /dev/null
+#! /usr/bin/python -tt
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" Safebuild is capable of doing backup and restore of workspace.
+This ensures that package repository gets incremental updates and only
+minimal set of packages are created """
+
+import argparse
+import logging
+import os
+import re
+import subprocess
+import tarfile
+
+from rpmbuilder.log import configure_logging
+from makebuild import Build, BuildingError, ArgumentMakebuild
+from stashworkspace import ArgumentRemote, Stasher
+
+
class Safebuild(Build):

    """ Safebuild extends capabilities of Build by providing backup and
    restore on top of normal building activities """

    def __init__(self, args):
        """ Store parsed command line arguments and remote location.

        :param args: argparse namespace with at least workspace,
            remotehost, remotedir and remotefunction attributes
        """
        super(Safebuild, self).__init__(args)
        self.logger = logging.getLogger(__name__)
        self.args = args
        # Archive holding the per-project status files of the workspace
        self.backupfilename = "configuration.tar.gz"
        self.remotehost = args.remotehost
        self.remotedir = args.remotedir

    def start_safebuilding(self):
        """ Starting a build requires checking of workspace, doing build
        and then backing up the state of build system """
        self.logger.info("Starting safe building by using remote %s:%s",
                         self.remotehost, self.remotedir)
        self.prepare_workspace()
        self.update_building_blocks()
        if self.start_building():
            self.backup_workspace()
            if self.args.remotefunction == "pullpush":
                stasher = Stasher()
                stasher.push_workspace_to_remote(toserver=self.remotehost,
                                                 todirectory=self.remotedir,
                                                 workspace=self.args.workspace)
            else:
                self.logger.info("Skipping updating remote host with new packages")

    def tar_file_from_workspace(self, tar, sourcefile):
        """ Archiving file from under workspace without
        workspace parent directory structure """
        arcfile = os.path.join(self.args.workspace, sourcefile)
        # Remove workspace directory prefix so archive paths are relative
        arcnamestring = re.sub(self.args.workspace, '', arcfile)
        self.logger.debug("Archiving %s", arcfile)
        tar.add(arcfile, arcname=arcnamestring)

    def backup_workspace(self):
        """ Backup status files and repositories """
        backuptarfile = os.path.join(self.args.workspace, self.backupfilename)
        self.logger.debug("Creating backup of configuration: %s",
                          backuptarfile)
        with tarfile.open(backuptarfile, 'w:gz') as tar:
            # Project settings: one status.txt per project directory
            projdir = os.path.join(self.args.workspace, "projects")
            for occurence in os.listdir(projdir):
                statusfile = os.path.join(projdir, occurence, 'status.txt')
                self.logger.info("Backing up file: %s", statusfile)
                if os.path.isfile(statusfile):
                    self.tar_file_from_workspace(tar, statusfile)
                else:
                    self.logger.warning("No %s for archiving", statusfile)

    def prepare_workspace(self):
        """ Check that workspace contains correct beginning state """
        projectsdir = os.path.join(self.args.workspace, "projects")
        if os.path.isdir(projectsdir):
            self.logger.info("Using existing Workspace %s", self.args.workspace)
        else:
            self.logger.info("Trying to restore workspace from remote")
            self.restore_workspace_from_remote(self.remotehost, self.remotedir)

    def restore_workspace_from_remote(self, fromserver, fromdirectory):
        """ Retrieve and restore workspace from remote server.

        Returns True when the remote holds no backup yet (initial build).

        :raises BuildingError: when rsync fails for any other reason
        """
        self.logger.info("Restoring workspace from remote %s:%s", fromserver, fromdirectory)
        source = fromserver + ":" + fromdirectory
        sshoptions = 'ssh -o stricthostkeychecking=no -o userknownhostsfile=/dev/null -o batchmode=yes -o passwordauthentication=no'
        cmd = ["/usr/bin/rsync",
               "--archive",
               "-e", sshoptions,
               os.path.join(source, "buildrepository"),
               os.path.join(source, self.backupfilename),
               os.path.join(self.args.workspace)]
        self.logger.debug("Running: %s", str(cmd))
        try:
            subprocess.check_call(cmd, shell=False, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as err:
            if err.returncode == 23:
                # rsync exit code 23: some source files were missing, i.e.
                # nothing has been backed up to the remote yet
                self.logger.info("There is no remote backup.. doing initial build")
                return True
            else:
                raise BuildingError("Rsync from remote server failed with exit code %d" % err.returncode)
        except Exception:
            # Was a bare except:; Exception no longer converts
            # KeyboardInterrupt/SystemExit into a build error.
            raise BuildingError("Unexpected error")

        backupfile = os.path.join(self.args.workspace, self.backupfilename)
        with tarfile.open(backupfile, 'r:gz') as tar:
            # NOTE(review): extractall on a fetched archive trusts its member
            # paths; the archive comes from our own backup host.
            tar.extractall(path=self.args.workspace)
        self.logger.info("Workspace restored from %s:%s",
                         fromserver,
                         fromdirectory)
+
+
class ArgumentStashMakebuild(object):
    """ Default arguments which are always needed """

    def __init__(self):
        """ Build the parser: common makebuild options, remote host/dir
        options and the --remotefunction mode selector. """
        self.parser = argparse.ArgumentParser(description='''
            RPM building tool for continuous integration and development usage.
            Uses remote host to retrieve and store incremental building state.
            ''')
        ArgumentMakebuild().set_arguments(self.parser)
        ArgumentRemote().set_arguments(self.parser)
        # Fixed typo in user-facing help text: "succesful" -> "successful"
        self.parser.add_argument("--remotefunction",
                                 choices=["pull", "pullpush"],
                                 default="pull",
                                 help="With \"pullpush\" remote is used to fetch previous"
                                      " build state and on successful build remote is updated with"
                                      " new packages. With \"pull\" packages are fetched but "
                                      " remote is not updated on successful builds. (Default: pull)")
+
+
def main():
    """ Read arguments and start processing build configuration """
    arguments = ArgumentStashMakebuild().parser.parse_args()

    # All debug-level messages are also written to a file in the workspace
    debuglogfile = os.path.join(arguments.workspace, 'debug.log')
    configure_logging(arguments.verbose, debuglogfile)
    Safebuild(arguments).start_safebuilding()


if __name__ == "__main__":
    main()
--- /dev/null
+#! /usr/bin/python -tt
+# Copyright 2019 Nokia
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import os
+import subprocess
+
class Stasher(object):
    """ Copies a built workspace to a remote host over rsync/ssh. """

    def __init__(self, args=None):
        """ :param args: optional argparse namespace with remotehost,
        remotedir and workspace attributes (needed only for start()) """
        self.args = args
        # Name of the workspace configuration archive created by safebuild
        self.backupfilename = "configuration.tar.gz"

    def start(self):
        """ Push the workspace given on the command line to the remote. """
        self.push_workspace_to_remote(toserver=self.args.remotehost,
                                      todirectory=self.args.remotedir,
                                      workspace=self.args.workspace)

    def push_workspace_to_remote(self, toserver, todirectory, workspace):
        """ Move workspace backup to remote host.

        :raises StasherError: when the rsync command fails
        """
        destination = toserver + ":" + todirectory
        sshoptions = 'ssh -o stricthostkeychecking=no -o userknownhostsfile=/dev/null -o batchmode=yes -o passwordauthentication=no'
        sourceconfiguration = os.path.join(workspace, self.backupfilename)
        sourcerpm = os.path.join(workspace, "buildrepository")
        # --rsync-path trick: create the target directory before syncing
        rsyncpathval = "mkdir -p " + todirectory + " && rsync"
        cmd = ["/usr/bin/rsync",
               "--verbose",
               "--archive",
               "--rsync-path", rsyncpathval,
               "-e", sshoptions,
               sourceconfiguration, sourcerpm,
               destination]
        try:
            # Parenthesized print works identically in python 2 and 3
            # (was a py2-only print statement); also fixed "retured" typo.
            print(subprocess.check_output(cmd, shell=False, stderr=subprocess.STDOUT))
        except subprocess.CalledProcessError as err:
            raise StasherError("Following command returned code %d: %s" % (err.returncode,
                                                                           ' '.join(err.cmd)))
+
class StasherError(Exception):
    """ Raised when stashing the workspace to the remote host fails. """
+
+
+
class ArgumentRemote(object):
    """ Default arguments which are always needed """

    def __init__(self):
        """ Build a parser holding the remote options plus --workspace. """
        description = 'Workspace stasher copies workspace to remote host.'
        self.parser = argparse.ArgumentParser(description=description)
        self.set_arguments(self.parser)
        self.parser.add_argument("--workspace",
                                 required=True,
                                 help="Local (source) directory")

    def set_arguments(self, parser):
        """ Add extra arguments to parser """
        for flag, helptext in (
                ("--remotehost",
                 "Remote host where script will ssh/rsync to store build"),
                ("--remotedir",
                 "Remote directory to use for storing build")):
            parser.add_argument(flag, help=helptext, required=True)
+
+
def main():
    """ Get arguments required for stashing local workspace """
    arguments = ArgumentRemote().parser.parse_args()
    Stasher(arguments).start()


if __name__ == "__main__":
    main()
--- /dev/null
+[tox]
+envlist = py27
+skipsdist = True
+
+[testenv]
+setenv =
+ COVERAGE_FILE = .coverage-{envname}
+passenv = COVERAGE_FILE
+basepython = python2.7
+
+commands = pytest -v \
+ --basetemp={envtmpdir} \
+ --pep8 \
+ --cov \
+ --cov-branch \
+ --cov-report term \
+ --cov-report html:htmlcov \
+ --cov-config .coveragerc \
+ {posargs:rpmbuilder/executor_test.py rpmbuilder/executor.py}
+
+deps=pytest
+ pytest-cov
+ pytest-flakes
+ pytest-pep8
+ mock
+
+[pytest]
+cache_dir = .pytest-cache
+pep8maxlinelength = 100
+basepython = python2.7
+
+[testenv:clean]
+deps=
+whitelist_externals = rm
+commands = rm -rf .coverage-py27 .pytest-cache __pycache__ htmlcov