add eclass from gentoo main repo

This commit is contained in:
ace
2021-02-06 20:03:46 +03:00
parent 6d4415db6c
commit 4620f15444
222 changed files with 66533 additions and 0 deletions

162
eclass/acct-group.eclass Normal file

@ -0,0 +1,162 @@
# Copyright 2019-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: acct-group.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
# @AUTHOR:
# Michael Orlitzky <mjo@gentoo.org>
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 7
# @BLURB: Eclass used to create and maintain a single group entry
# @DESCRIPTION:
# This eclass represents and creates a single group entry. The name
# of the group is derived from ${PN}, while (preferred) GID needs to
# be specified via ACCT_GROUP_ID. Packages (and users) needing the group
# in question should depend on the package providing it.
#
# Example:
# If your package needs group 'foo', you create 'acct-group/foo' package
# and add an ebuild with the following contents:
#
# @CODE
# EAPI=7
# inherit acct-group
# ACCT_GROUP_ID=200
# @CODE
#
# Then you add appropriate dependency to your package. The dependency
# type(s) should be:
# - DEPEND (+ RDEPEND) if the group is already needed at build time,
# - RDEPEND if it is needed at install time (e.g. you 'fowners' files
# in pkg_preinst) or run time.
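#
# A minimal sketch of a consuming ebuild (the group name 'foo' is purely
# illustrative) that only needs the group at run time:
#
# @CODE
# RDEPEND="acct-group/foo"
# @CODE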
if [[ -z ${_ACCT_GROUP_ECLASS} ]]; then
_ACCT_GROUP_ECLASS=1
case ${EAPI:-0} in
7) ;;
*) die "EAPI=${EAPI:-0} not supported";;
esac
inherit user
[[ ${CATEGORY} == acct-group ]] ||
die "Ebuild error: this eclass can be used only in acct-group category!"
# << Eclass variables >>
# @ECLASS-VARIABLE: ACCT_GROUP_NAME
# @INTERNAL
# @DESCRIPTION:
# The name of the group. This is forced to ${PN} and the policy
# prohibits it from being changed.
ACCT_GROUP_NAME=${PN}
readonly ACCT_GROUP_NAME
# @ECLASS-VARIABLE: ACCT_GROUP_ID
# @REQUIRED
# @DESCRIPTION:
# Preferred GID for the new group. This variable is obligatory, and its
# value must be unique across all group packages. This can be overridden
# in make.conf through ACCT_GROUP_<UPPERCASE_GROUPNAME>_ID variable.
#
# Overlays should set this to -1 to dynamically allocate GID. Using -1
# in ::gentoo is prohibited by policy.
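#
# As an illustrative sketch, a local make.conf override for a hypothetical
# acct-group/foo package could look like:
#
# @CODE
# ACCT_GROUP_FOO_ID=200
# @CODE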
# @ECLASS-VARIABLE: ACCT_GROUP_ENFORCE_ID
# @DESCRIPTION:
# If set to a non-null value, the eclass will require the group to have
# the specified GID. If the group already exists with another GID, or
# the GID is taken by another group, the install will fail.
: ${ACCT_GROUP_ENFORCE_ID:=}
# << Boilerplate ebuild variables >>
: ${DESCRIPTION:="System group: ${ACCT_GROUP_NAME}"}
: ${SLOT:=0}
: ${KEYWORDS:=alpha amd64 arm arm64 hppa ia64 m68k ~mips ppc ppc64 ~riscv s390 sparc x86 ~x64-cygwin ~amd64-linux ~x86-linux ~ppc-macos ~x64-macos ~sparc-solaris ~sparc64-solaris ~x64-solaris ~x86-solaris}
S=${WORKDIR}
# << Phase functions >>
EXPORT_FUNCTIONS pkg_pretend src_install pkg_preinst
# @FUNCTION: acct-group_pkg_pretend
# @DESCRIPTION:
# Performs sanity checks for correct eclass usage, and early-checks
# whether requested GID can be enforced.
acct-group_pkg_pretend() {
debug-print-function ${FUNCNAME} "${@}"
# verify ACCT_GROUP_ID
[[ -n ${ACCT_GROUP_ID} ]] || die "Ebuild error: ACCT_GROUP_ID must be set!"
[[ ${ACCT_GROUP_ID} -ge -1 ]] || die "Ebuild error: ACCT_GROUP_ID=${ACCT_GROUP_ID} invalid!"
local group_id=${ACCT_GROUP_ID}
# check for the override
local override_name=${ACCT_GROUP_NAME^^}
local override_var=ACCT_GROUP_${override_name//-/_}_ID
if [[ -n ${!override_var} ]]; then
group_id=${!override_var}
[[ ${group_id} -ge -1 ]] || die "${override_var}=${group_id} invalid!"
fi
# check for ACCT_GROUP_ID collisions early
if [[ ${group_id} -ne -1 && -n ${ACCT_GROUP_ENFORCE_ID} ]]; then
local group_by_id=$(egetgroupname "${group_id}")
local group_by_name=$(egetent group "${ACCT_GROUP_NAME}")
if [[ -n ${group_by_id} ]]; then
if [[ ${group_by_id} != ${ACCT_GROUP_NAME} ]]; then
eerror "The required GID is already taken by another group."
eerror " GID: ${group_id}"
eerror " needed for: ${ACCT_GROUP_NAME}"
eerror " current group: ${group_by_id}"
die "GID ${group_id} taken already"
fi
elif [[ -n ${group_by_name} ]]; then
eerror "The requested group exists already with wrong GID."
eerror " groupname: ${ACCT_GROUP_NAME}"
eerror " requested GID: ${group_id}"
eerror " current entry: ${group_by_name}"
die "Group ${ACCT_GROUP_NAME} exists with wrong GID"
fi
fi
}
# @FUNCTION: acct-group_src_install
# @DESCRIPTION:
# Installs sysusers.d file for the group.
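#
# As a sketch, with ACCT_GROUP_NAME=foo and an effective GID of 200,
# the generated /usr/lib/sysusers.d/acct-group-foo.conf contains roughly:
#
# @CODE
# g	foo	200
# @CODE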
acct-group_src_install() {
debug-print-function ${FUNCNAME} "${@}"
# check for the override
local override_name=${ACCT_GROUP_NAME^^}
local override_var=ACCT_GROUP_${override_name//-/_}_ID
if [[ -n ${!override_var} ]]; then
ewarn "${override_var}=${!override_var} override in effect, support will not be provided."
_ACCT_GROUP_ID=${!override_var}
else
_ACCT_GROUP_ID=${ACCT_GROUP_ID}
fi
insinto /usr/lib/sysusers.d
newins - ${CATEGORY}-${ACCT_GROUP_NAME}.conf < <(
printf "g\t%q\t%q\n" \
"${ACCT_GROUP_NAME}" \
"${_ACCT_GROUP_ID/#-*/-}"
)
}
# @FUNCTION: acct-group_pkg_preinst
# @DESCRIPTION:
# Creates the group if it does not exist yet.
acct-group_pkg_preinst() {
debug-print-function ${FUNCNAME} "${@}"
enewgroup ${ACCT_GROUP_ENFORCE_ID:+-F} "${ACCT_GROUP_NAME}" \
"${_ACCT_GROUP_ID}"
}
fi

489
eclass/acct-user.eclass Normal file

@ -0,0 +1,489 @@
# Copyright 2019-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: acct-user.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
# @AUTHOR:
# Michael Orlitzky <mjo@gentoo.org>
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 7
# @BLURB: Eclass used to create and maintain a single user entry
# @DESCRIPTION:
# This eclass represents and creates a single user entry. The name
# of the user is derived from ${PN}, while (preferred) UID needs to
# be specified via ACCT_USER_ID. Additional variables are provided
# to override the default home directory, shell and add group
# membership. Packages needing the user in question should depend
# on the package providing it.
#
# The ebuild needs to call acct-user_add_deps after specifying
# ACCT_USER_GROUPS.
#
# Example:
# If your package needs user 'foo' belonging to same-named group, you
# create 'acct-user/foo' package and add an ebuild with the following
# contents:
#
# @CODE
# EAPI=7
# inherit acct-user
# ACCT_USER_ID=200
# ACCT_USER_GROUPS=( foo )
# acct-user_add_deps
# @CODE
#
# Then you add appropriate dependency to your package. The dependency
# type(s) should be:
# - DEPEND (+ RDEPEND) if the user is already needed at build time,
# - RDEPEND if it is needed at install time (e.g. you 'fowners' files
# in pkg_preinst) or run time.
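#
# A minimal sketch of a consuming ebuild (the user name 'foo' is purely
# illustrative) that already needs the user when 'fowners' is called during
# the install phases:
#
# @CODE
# DEPEND="acct-user/foo"
# RDEPEND="${DEPEND}"
# @CODE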
if [[ -z ${_ACCT_USER_ECLASS} ]]; then
_ACCT_USER_ECLASS=1
case ${EAPI:-0} in
7) ;;
*) die "EAPI=${EAPI:-0} not supported";;
esac
inherit user
[[ ${CATEGORY} == acct-user ]] ||
die "Ebuild error: this eclass can be used only in acct-user category!"
# << Eclass variables >>
# @ECLASS-VARIABLE: ACCT_USER_NAME
# @INTERNAL
# @DESCRIPTION:
# The name of the user. This is forced to ${PN} and the policy prohibits
# it from being changed.
ACCT_USER_NAME=${PN}
readonly ACCT_USER_NAME
# @ECLASS-VARIABLE: ACCT_USER_ID
# @REQUIRED
# @DESCRIPTION:
# Preferred UID for the new user. This variable is obligatory, and its
# value must be unique across all user packages. This can be overridden
# in make.conf through ACCT_USER_<UPPERCASE_USERNAME>_ID variable.
#
# Overlays should set this to -1 to dynamically allocate UID. Using -1
# in ::gentoo is prohibited by policy.
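#
# As an illustrative sketch, a local make.conf override requesting dynamic
# UID allocation for a hypothetical acct-user/foo package:
#
# @CODE
# ACCT_USER_FOO_ID=-1
# @CODE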
# @ECLASS-VARIABLE: _ACCT_USER_ALREADY_EXISTS
# @INTERNAL
# @DESCRIPTION:
# Status variable which indicates if user already exists.
# @ECLASS-VARIABLE: ACCT_USER_ENFORCE_ID
# @DESCRIPTION:
# If set to a non-null value, the eclass will require the user to have
# the specified UID. If the user already exists with another UID, or
# the UID is taken by another user, the install will fail.
: ${ACCT_USER_ENFORCE_ID:=}
# @ECLASS-VARIABLE: ACCT_USER_NO_MODIFY
# @DEFAULT_UNSET
# @DESCRIPTION:
# If set to a non-null value, the eclass will not make any changes
# to an already existing user.
: ${ACCT_USER_NO_MODIFY:=}
# @ECLASS-VARIABLE: ACCT_USER_SHELL
# @DESCRIPTION:
# The shell to use for the user. If not specified, a 'nologin' variant
# for the system is used. This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_SHELL variable.
: ${ACCT_USER_SHELL:=-1}
# @ECLASS-VARIABLE: ACCT_USER_HOME
# @DESCRIPTION:
# The home directory for the user. If not specified, /dev/null is used.
# The directory will be created with appropriate permissions if it does
# not exist. When updating, existing home directory will not be moved.
# This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_HOME variable.
: ${ACCT_USER_HOME:=/dev/null}
# @ECLASS-VARIABLE: ACCT_USER_HOME_OWNER
# @DEFAULT_UNSET
# @DESCRIPTION:
# The ownership to use for the home directory, in chown ([user][:group])
# syntax. Defaults to the newly created user, and its primary group.
# This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_HOME_OWNER variable.
# @ECLASS-VARIABLE: ACCT_USER_HOME_PERMS
# @DESCRIPTION:
# The permissions to use for the home directory, in chmod (octal
# or verbose) form. This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_HOME_PERMS variable.
: ${ACCT_USER_HOME_PERMS:=0755}
# @ECLASS-VARIABLE: ACCT_USER_GROUPS
# @REQUIRED
# @DESCRIPTION:
# List of groups the user should belong to. This must be a bash
# array. The first group specified is the user's primary group, while
# the remaining groups (if any) become supplementary groups.
#
# This can be overridden in make.conf through
# ACCT_USER_<UPPERCASE_USERNAME>_GROUPS variable, or appended to
# via ACCT_USER_<UPPERCASE_USERNAME>_GROUPS_ADD. Please note that
# due to technical limitations, the override variables are not arrays
# but space-separated lists.
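#
# As a sketch, adding the user of a hypothetical acct-user/foo package to
# an extra group locally via make.conf (space-separated list, not an array):
#
# @CODE
# ACCT_USER_FOO_GROUPS_ADD="audio"
# @CODE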
# << Boilerplate ebuild variables >>
: ${DESCRIPTION:="System user: ${ACCT_USER_NAME}"}
: ${SLOT:=0}
: ${KEYWORDS:=alpha amd64 arm arm64 hppa ia64 m68k ~mips ppc ppc64 ~riscv s390 sparc x86 ~x64-cygwin ~amd64-linux ~x86-linux ~ppc-macos ~x64-macos ~sparc-solaris ~sparc64-solaris ~x64-solaris ~x86-solaris}
S=${WORKDIR}
# << API functions >>
# @FUNCTION: acct-user_add_deps
# @DESCRIPTION:
# Generate appropriate RDEPEND from ACCT_USER_GROUPS. This must be
# called if ACCT_USER_GROUPS are set.
acct-user_add_deps() {
debug-print-function ${FUNCNAME} "${@}"
# ACCT_USER_GROUPS sanity check
if [[ $(declare -p ACCT_USER_GROUPS) != "declare -a"* ]]; then
die 'ACCT_USER_GROUPS must be an array.'
elif [[ ${#ACCT_USER_GROUPS[@]} -eq 0 ]]; then
die 'ACCT_USER_GROUPS must not be empty.'
fi
RDEPEND+=${ACCT_USER_GROUPS[*]/#/ acct-group/}
_ACCT_USER_ADD_DEPS_CALLED=1
}
# << Helper functions >>
# @FUNCTION: eislocked
# @USAGE: <user>
# @INTERNAL
# @DESCRIPTION:
# Check whether the specified user account is currently locked.
# Returns 0 if it is locked, 1 if it is not, 2 if the platform
# does not support determining it.
eislocked() {
[[ $# -eq 1 ]] || die "usage: ${FUNCNAME} <user>"
if [[ ${EUID} != 0 ]]; then
einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
return 0
fi
case ${CHOST} in
*-freebsd*|*-dragonfly*|*-netbsd*)
[[ $(egetent "$1" | cut -d: -f2) == '*LOCKED*'* ]]
;;
*-openbsd*)
return 2
;;
*)
# NB: 'no password' and 'locked' are indistinguishable
# but we also expire the account which is more clear
[[ $(getent shadow "$1" | cut -d: -f2) == '!'* ]] &&
[[ $(getent shadow "$1" | cut -d: -f8) == 1 ]]
;;
esac
}
# @FUNCTION: elockuser
# @USAGE: <user>
# @INTERNAL
# @DESCRIPTION:
# Lock the specified user account, using the available platform-specific
# functions. This should prevent any login to the account.
#
# Established lock can be reverted using eunlockuser.
#
# This function returns 0 if locking succeeded, 2 if it is not supported
# by the platform code or dies if it fails.
elockuser() {
[[ $# -eq 1 ]] || die "usage: ${FUNCNAME} <user>"
if [[ ${EUID} != 0 ]]; then
einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
return 0
fi
eislocked "$1"
[[ $? -eq 0 ]] && return 0
case ${CHOST} in
*-freebsd*|*-dragonfly*)
pw lock "$1" || die "Locking account $1 failed"
pw user mod "$1" -e 1 || die "Expiring account $1 failed"
;;
*-netbsd*)
usermod -e 1 -C yes "$1" || die "Locking account $1 failed"
;;
*-openbsd*)
return 2
;;
*)
usermod -e 1 -L "$1" || die "Locking account $1 failed"
;;
esac
elog "User account $1 locked"
return 0
}
# @FUNCTION: eunlockuser
# @USAGE: <user>
# @INTERNAL
# @DESCRIPTION:
# Unlock the specified user account, using the available platform-
# specific functions.
#
# This function returns 0 if unlocking succeeded, 1 if it is not
# supported by the platform code or dies if it fails.
eunlockuser() {
[[ $# -eq 1 ]] || die "usage: ${FUNCNAME} <user>"
if [[ ${EUID} != 0 ]]; then
einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
return 0
fi
eislocked "$1"
[[ $? -eq 1 ]] && return 0
case ${CHOST} in
*-freebsd*|*-dragonfly*)
pw user mod "$1" -e 0 || die "Unexpiring account $1 failed"
pw unlock "$1" || die "Unlocking account $1 failed"
;;
*-netbsd*)
usermod -e 0 -C no "$1" || die "Unlocking account $1 failed"
;;
*-openbsd*)
return 1
;;
*)
# silence warning if account does not have a password
usermod -e "" -U "$1" 2>/dev/null || die "Unlocking account $1 failed"
;;
esac
ewarn "User account $1 unlocked after reinstating."
return 0
}
# << Phase functions >>
EXPORT_FUNCTIONS pkg_pretend src_install pkg_preinst pkg_postinst \
pkg_prerm
# @FUNCTION: acct-user_pkg_pretend
# @DESCRIPTION:
# Performs sanity checks for correct eclass usage, and early-checks
# whether requested UID can be enforced.
acct-user_pkg_pretend() {
debug-print-function ${FUNCNAME} "${@}"
# verify that acct-user_add_deps() has been called
# (it verifies ACCT_USER_GROUPS itself)
if [[ -z ${_ACCT_USER_ADD_DEPS_CALLED} ]]; then
die "Ebuild error: acct-user_add_deps must have been called in global scope!"
fi
# verify ACCT_USER_ID
[[ -n ${ACCT_USER_ID} ]] || die "Ebuild error: ACCT_USER_ID must be set!"
[[ ${ACCT_USER_ID} -ge -1 ]] || die "Ebuild error: ACCT_USER_ID=${ACCT_USER_ID} invalid!"
local user_id=${ACCT_USER_ID}
# check for the override
local override_name=${ACCT_USER_NAME^^}
local override_var=ACCT_USER_${override_name//-/_}_ID
if [[ -n ${!override_var} ]]; then
user_id=${!override_var}
[[ ${user_id} -ge -1 ]] || die "${override_var}=${user_id} invalid!"
fi
# check for ACCT_USER_ID collisions early
if [[ ${user_id} -ne -1 && -n ${ACCT_USER_ENFORCE_ID} ]]; then
local user_by_id=$(egetusername "${user_id}")
local user_by_name=$(egetent passwd "${ACCT_USER_NAME}")
if [[ -n ${user_by_id} ]]; then
if [[ ${user_by_id} != ${ACCT_USER_NAME} ]]; then
eerror "The required UID is already taken by another user."
eerror " UID: ${user_id}"
eerror " needed for: ${ACCT_USER_NAME}"
eerror " current user: ${user_by_id}"
die "UID ${user_id} taken already"
fi
elif [[ -n ${user_by_name} ]]; then
eerror "The requested user exists already with wrong UID."
eerror " username: ${ACCT_USER_NAME}"
eerror " requested UID: ${user_id}"
eerror " current entry: ${user_by_name}"
die "Username ${ACCT_USER_NAME} exists with wrong UID"
fi
fi
}
# @FUNCTION: acct-user_src_install
# @DESCRIPTION:
# Installs a keep-file into the user's home directory to ensure it is
# owned by the package, and sysusers.d file.
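#
# As a sketch, a 'foo' user with UID 200, primary group 'foo' and an extra
# 'audio' group yields a sysusers.d file roughly like the following
# (whitespace in the description field is %q-escaped, colons become ';'):
#
# @CODE
# u	foo	200:foo	System\ user;\ foo	/dev/null	-
# m	foo	audio
# @CODE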
acct-user_src_install() {
debug-print-function ${FUNCNAME} "${@}"
# serialize for override support
local ACCT_USER_GROUPS=${ACCT_USER_GROUPS[*]}
# support make.conf overrides
local override_name=${ACCT_USER_NAME^^}
override_name=${override_name//-/_}
local var
for var in ACCT_USER_{ID,SHELL,HOME{,_OWNER,_PERMS},GROUPS}; do
local var_name=ACCT_USER_${override_name}_${var#ACCT_USER_}
if [[ -n ${!var_name} ]]; then
ewarn "${var_name}=${!var_name} override in effect, support will not be provided."
else
var_name=${var}
fi
declare -g "_${var}=${!var_name}"
done
var_name=ACCT_USER_${override_name}_GROUPS_ADD
if [[ -n ${!var_name} ]]; then
ewarn "${var_name}=${!var_name} override in effect, support will not be provided."
_ACCT_USER_GROUPS+=" ${!var_name}"
fi
# deserialize into an array
local groups=( ${_ACCT_USER_GROUPS} )
if [[ ${_ACCT_USER_HOME} != /dev/null ]]; then
# note: we can't set permissions here since the user isn't
# created yet
keepdir "${_ACCT_USER_HOME}"
fi
insinto /usr/lib/sysusers.d
newins - ${CATEGORY}-${ACCT_USER_NAME}.conf < <(
printf "u\t%q\t%q\t%q\t%q\t%q\n" \
"${ACCT_USER_NAME}" \
"${_ACCT_USER_ID/#-*/-}:${groups[0]}" \
"${DESCRIPTION//[:,=]/;}" \
"${_ACCT_USER_HOME}" \
"${_ACCT_USER_SHELL/#-*/-}"
if [[ ${#groups[@]} -gt 1 ]]; then
printf "m\t${ACCT_USER_NAME}\t%q\n" \
"${groups[@]:1}"
fi
)
}
# @FUNCTION: acct-user_pkg_preinst
# @DESCRIPTION:
# Creates the user if it does not exist yet. Sets permissions
# of the home directory in install image.
acct-user_pkg_preinst() {
debug-print-function ${FUNCNAME} "${@}"
# check if user already exists
_ACCT_USER_ALREADY_EXISTS=
if [[ -n $(egetent passwd "${ACCT_USER_NAME}") ]]; then
_ACCT_USER_ALREADY_EXISTS=1
fi
readonly _ACCT_USER_ALREADY_EXISTS
enewuser ${ACCT_USER_ENFORCE_ID:+-F} -M "${ACCT_USER_NAME}" \
"${_ACCT_USER_ID}" "${_ACCT_USER_SHELL}" "${_ACCT_USER_HOME}" \
"${_ACCT_USER_GROUPS// /,}"
if [[ ${_ACCT_USER_HOME} != /dev/null ]]; then
# default ownership to user:group
if [[ -z ${_ACCT_USER_HOME_OWNER} ]]; then
local group_array=( ${_ACCT_USER_GROUPS} )
_ACCT_USER_HOME_OWNER=${ACCT_USER_NAME}:${group_array[0]}
fi
# Path might be missing due to INSTALL_MASK, etc.
# https://bugs.gentoo.org/691478
if [[ ! -e "${ED}/${_ACCT_USER_HOME#/}" ]]; then
eerror "Home directory is missing from the installation image:"
eerror " ${_ACCT_USER_HOME}"
eerror "Check INSTALL_MASK for entries that would cause this."
die "${_ACCT_USER_HOME} does not exist"
fi
fowners "${_ACCT_USER_HOME_OWNER}" "${_ACCT_USER_HOME}"
fperms "${_ACCT_USER_HOME_PERMS}" "${_ACCT_USER_HOME}"
fi
}
# @FUNCTION: acct-user_pkg_postinst
# @DESCRIPTION:
# Updates user properties if necessary. This needs to be done after
# new home directory is installed.
acct-user_pkg_postinst() {
debug-print-function ${FUNCNAME} "${@}"
if [[ ${EUID} != 0 ]]; then
einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
return 0
fi
if [[ -n ${ACCT_USER_NO_MODIFY} && -n ${_ACCT_USER_ALREADY_EXISTS} ]]; then
eunlockuser "${ACCT_USER_NAME}"
ewarn "User ${ACCT_USER_NAME} already exists; Not touching existing user"
ewarn "due to set ACCT_USER_NO_MODIFY."
return 0
fi
# NB: eset* functions check current value
esethome "${ACCT_USER_NAME}" "${_ACCT_USER_HOME}"
esetshell "${ACCT_USER_NAME}" "${_ACCT_USER_SHELL}"
esetgroups "${ACCT_USER_NAME}" "${_ACCT_USER_GROUPS// /,}"
# comment field can not contain colons
esetcomment "${ACCT_USER_NAME}" "${DESCRIPTION//[:,=]/;}"
eunlockuser "${ACCT_USER_NAME}"
}
# @FUNCTION: acct-user_pkg_prerm
# @DESCRIPTION:
# Ensures that the user account is locked out when it is removed.
acct-user_pkg_prerm() {
debug-print-function ${FUNCNAME} "${@}"
if [[ ${EUID} != 0 ]]; then
einfo "Insufficient privileges to execute ${FUNCNAME[0]}"
return 0
fi
if [[ -z ${REPLACED_BY_VERSION} ]]; then
if [[ -z $(egetent passwd "${ACCT_USER_NAME}") ]]; then
ewarn "User account not found: ${ACCT_USER_NAME}"
ewarn "Locking process will be skipped."
return
fi
esetshell "${ACCT_USER_NAME}" -1
esetcomment "${ACCT_USER_NAME}" \
"$(egetcomment "${ACCT_USER_NAME}"); user account removed @ $(date +%Y-%m-%d)"
elockuser "${ACCT_USER_NAME}"
fi
}
fi

464
eclass/ada.eclass Normal file

@ -0,0 +1,464 @@
# Copyright 2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ada.eclass
# @MAINTAINER:
# Ada team <ada@gentoo.org>
# @AUTHOR:
# Tupone Alfredo <tupone@gentoo.org>
# @SUPPORTED_EAPIS: 6 7
# @BLURB: An eclass for Ada packages
# @DESCRIPTION:
# This eclass sets IUSE and REQUIRED_USE to request the ADA_TARGET
# when the inheriting ebuild can be supported by more than one Ada
# implementation. It also sets ADA_USEDEP and ADA_DEPS in a suitable form.
# A common eclass providing helper functions to build and install
# packages supporting Ada implementations.
#
# This eclass sets correct IUSE. Modification of REQUIRED_USE has to
# be done by the author of the ebuild (but ADA_REQUIRED_USE is
# provided for convenience, see below). ada exports ADA_DEPS
# and ADA_USEDEP so you can create correct dependencies for your
# package easily.
#
# Mostly copied from python-single-r1.eclass
case "${EAPI:-0}" in
0|1|2|3|4|5)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
6|7)
# EAPI=5 is required for sane USE_EXPAND dependencies
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
EXPORT_FUNCTIONS pkg_setup
# @ECLASS-VARIABLE: ADA_DEPS
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# This is an eclass-generated Ada dependency string for all
# implementations listed in ADA_COMPAT.
#
# The dependency string is conditional on ADA_TARGET.
#
# Example use:
# @CODE
# RDEPEND="${ADA_DEPS}
# dev-foo/mydep"
# DEPEND="${RDEPEND}"
# @CODE
#
# @ECLASS-VARIABLE: _ADA_ALL_IMPLS
# @INTERNAL
# @DESCRIPTION:
# All supported Ada implementations, most preferred last.
_ADA_ALL_IMPLS=(
gnat_2016 gnat_2017 gnat_2018 gnat_2019
)
readonly _ADA_ALL_IMPLS
# @FUNCTION: _ada_impl_supported
# @USAGE: <impl>
# @INTERNAL
# @DESCRIPTION:
# Check whether the implementation <impl> (ADA_COMPAT-form)
# is still supported.
#
# Returns 0 if the implementation is valid and supported. If it is
# unsupported, returns 1 -- and the caller should ignore the entry.
# If it is invalid, dies with an appropriate error message.
_ada_impl_supported() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 1 ]] || die "${FUNCNAME}: takes exactly 1 argument (impl)."
local impl=${1}
# keep in sync with _ADA_ALL_IMPLS!
# (not using that list because inline patterns shall be faster)
case "${impl}" in
gnat_201[6789])
return 0
;;
*)
[[ ${ADA_COMPAT_NO_STRICT} ]] && return 1
die "Invalid implementation in ADA_COMPAT: ${impl}"
esac
}
# @FUNCTION: _ada_set_impls
# @INTERNAL
# @DESCRIPTION:
# Check ADA_COMPAT for well-formedness and validity, then set
# two global variables:
#
# - _ADA_SUPPORTED_IMPLS containing valid implementations supported
# by the ebuild (ADA_COMPAT - dead implementations),
#
# - and _ADA_UNSUPPORTED_IMPLS containing valid implementations that
# are not supported by the ebuild.
#
# Implementations in both variables are ordered using the pre-defined
# eclass implementation ordering.
#
# This function must be called once in global scope by an eclass
# utilizing ADA_COMPAT.
_ada_set_impls() {
local i
if ! declare -p ADA_COMPAT &>/dev/null; then
die 'ADA_COMPAT not declared.'
fi
if [[ $(declare -p ADA_COMPAT) != "declare -a"* ]]; then
die 'ADA_COMPAT must be an array.'
fi
for i in "${ADA_COMPAT[@]}"; do
# trigger validity checks
_ada_impl_supported "${i}"
done
local supp=() unsupp=()
for i in "${_ADA_ALL_IMPLS[@]}"; do
if has "${i}" "${ADA_COMPAT[@]}"; then
supp+=( "${i}" )
else
unsupp+=( "${i}" )
fi
done
if [[ ! ${supp[@]} ]]; then
die "No supported implementation in ADA_COMPAT."
fi
if [[ ${_ADA_SUPPORTED_IMPLS[@]} ]]; then
# set once already, verify integrity
if [[ ${_ADA_SUPPORTED_IMPLS[@]} != ${supp[@]} ]]; then
eerror "Supported impls (ADA_COMPAT) changed between inherits!"
eerror "Before: ${_ADA_SUPPORTED_IMPLS[*]}"
eerror "Now : ${supp[*]}"
die "_ADA_SUPPORTED_IMPLS integrity check failed"
fi
if [[ ${_ADA_UNSUPPORTED_IMPLS[@]} != ${unsupp[@]} ]]; then
eerror "Unsupported impls changed between inherits!"
eerror "Before: ${_ADA_UNSUPPORTED_IMPLS[*]}"
eerror "Now : ${unsupp[*]}"
die "_ADA_UNSUPPORTED_IMPLS integrity check failed"
fi
else
_ADA_SUPPORTED_IMPLS=( "${supp[@]}" )
_ADA_UNSUPPORTED_IMPLS=( "${unsupp[@]}" )
readonly _ADA_SUPPORTED_IMPLS _ADA_UNSUPPORTED_IMPLS
fi
}
# @FUNCTION: ada_export
# @USAGE: [<impl>] <variables>...
# @DESCRIPTION:
# Set and export the Ada implementation-relevant variables passed
# as parameters.
#
# The optional first parameter may specify the requested Ada
# implementation (an ADA_TARGET value, e.g. gnat_2018). If no
# implementation is passed, the current one will be obtained from ${EADA}.
#
# The variables which can be exported are: GCC, EADA, GNATMAKE.
# They are described more completely in the eclass
# variable documentation.
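#
# Example use (a sketch; assumes the gnat_2018 implementation is installed):
# @CODE
# ada_export gnat_2018 GNATMAKE
# "${GNATMAKE}" main.adb
# @CODE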
ada_export() {
debug-print-function ${FUNCNAME} "${@}"
local impl var
case "${1}" in
gnat_201[6789])
impl=${1}
shift
;;
*)
impl=${EADA}
if [[ -z ${impl} ]]; then
die "ada_export called without a ada implementation and EADA is unset"
fi
;;
esac
debug-print "${FUNCNAME}: implementation: ${impl}"
local gcc_pv
case "${impl}" in
gnat_2016)
gcc_pv=4.9.4
;;
gnat_2017)
gcc_pv=6.3.0
;;
gnat_2018)
gcc_pv=7.3.1
;;
gnat_2019)
gcc_pv=8.3.1
;;
*)
gcc_pv="9.9.9"
;;
esac
for var; do
case "${var}" in
EADA)
export EADA=${impl}
debug-print "${FUNCNAME}: EADA = ${EADA}"
;;
GCC)
export GCC=${EPREFIX}/usr/bin/gcc-${gcc_pv}
debug-print "${FUNCNAME}: GCC = ${GCC}"
;;
GCC_PV)
export GCC_PV=${gcc_pv}
debug-print "${FUNCNAME}: GCC_PV = ${GCC_PV}"
;;
GNAT)
export GNAT=${EPREFIX}/usr/bin/gnat-${gcc_pv}
debug-print "${FUNCNAME}: GNAT = ${GNAT}"
;;
GNATBIND)
export GNATBIND=${EPREFIX}/usr/bin/gnatbind-${gcc_pv}
debug-print "${FUNCNAME}: GNATBIND = ${GNATBIND}"
;;
GNATMAKE)
export GNATMAKE=${EPREFIX}/usr/bin/gnatmake-${gcc_pv}
debug-print "${FUNCNAME}: GNATMAKE = ${GNATMAKE}"
;;
GNATLS)
export GNATLS=${EPREFIX}/usr/bin/gnatls-${gcc_pv}
debug-print "${FUNCNAME}: GNATLS = ${GNATLS}"
;;
GNATPREP)
export GNATPREP=${EPREFIX}/usr/bin/gnatprep-${gcc_pv}
debug-print "${FUNCNAME}: GNATPREP = ${GNATPREP}"
;;
GNATCHOP)
export GNATCHOP=${EPREFIX}/usr/bin/gnatchop-${gcc_pv}
debug-print "${FUNCNAME}: GNATCHOP = ${GNATCHOP}"
;;
ADA_PKG_DEP)
ADA_PKG_DEP="dev-lang/gnat-gpl:${gcc_pv}"
# use-dep
if [[ ${ADA_REQ_USE} ]]; then
ADA_PKG_DEP+=[${ADA_REQ_USE}]
fi
export ADA_PKG_DEP
debug-print "${FUNCNAME}: ADA_PKG_DEP = ${ADA_PKG_DEP}"
;;
*)
die "ada_export: unknown variable ${var}"
esac
done
}
_ada_single_set_globals() {
_ada_set_impls
local i ADA_PKG_DEP
local flags=( "${_ADA_SUPPORTED_IMPLS[@]/#/ada_target_}" )
local unflags=( "${_ADA_UNSUPPORTED_IMPLS[@]/#/-ada_target_}" )
local allflags=( ${flags[@]} ${unflags[@]} )
local optflags=${flags[@]/%/(-)?}
IUSE="${allflags[*]}"
if [[ ${#_ADA_UNSUPPORTED_IMPLS[@]} -gt 0 ]]; then
optflags+=,${unflags[@]/%/(-)}
fi
local deps requse usedep
if [[ ${#_ADA_SUPPORTED_IMPLS[@]} -eq 1 ]]; then
# There is only one supported implementation; set IUSE and other
# variables without ADA_SINGLE_TARGET.
requse=${flags[*]}
ada_export "${_ADA_SUPPORTED_IMPLS[0]}" ADA_PKG_DEP
deps="${flags[*]}? ( ${ADA_PKG_DEP} ) "
else
# Multiple supported implementations; honor ADA_TARGET.
requse="^^ ( ${flags[*]} )"
for i in "${_ADA_SUPPORTED_IMPLS[@]}"; do
ada_export "${i}" ADA_PKG_DEP
deps+="ada_target_${i}? ( ${ADA_PKG_DEP} ) "
done
fi
usedep=${optflags// /,}
if [[ ${ADA_DEPS+1} ]]; then
if [[ ${ADA_DEPS} != "${deps}" ]]; then
eerror "ADA_DEPS have changed between inherits (ADA_REQ_USE?)!"
eerror "Before: ${ADA_DEPS}"
eerror "Now : ${deps}"
die "ADA_DEPS integrity check failed"
fi
# these two are a formality -- they depend on ADA_COMPAT only
if [[ ${ADA_REQUIRED_USE} != ${requse} ]]; then
eerror "ADA_REQUIRED_USE have changed between inherits!"
eerror "Before: ${ADA_REQUIRED_USE}"
eerror "Now : ${requse}"
die "ADA_REQUIRED_USE integrity check failed"
fi
if [[ ${ADA_USEDEP} != "${usedep}" ]]; then
eerror "ADA_USEDEP have changed between inherits!"
eerror "Before: ${ADA_USEDEP}"
eerror "Now : ${usedep}"
die "ADA_USEDEP integrity check failed"
fi
else
ADA_DEPS=${deps}
ADA_REQUIRED_USE=${requse}
ADA_USEDEP=${usedep}
readonly ADA_DEPS ADA_REQUIRED_USE ADA_USEDEP
fi
}
_ada_single_set_globals
unset -f _ada_single_set_globals
# @FUNCTION: ada_wrapper_setup
# @USAGE: [<path> [<impl>]]
# @DESCRIPTION:
# Create proper 'ada' executable wrappers
# in the directory named by <path>. Set up PATH
# appropriately. <path> defaults to ${T}/${EADA}.
#
# The wrappers will be created for implementation named by <impl>,
# or for one named by ${EADA} if no <impl> passed.
#
# If the named directory contains a ada symlink already, it will
# be assumed to contain proper wrappers already and only environment
# setup will be done. If wrapper update is requested, the directory
# shall be removed first.
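#
# Example (a sketch): after the wrappers are set up, plain tool names
# resolve to the selected implementation.
# @CODE
# ada_wrapper_setup
# gnatmake main.adb
# @CODE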
ada_wrapper_setup() {
debug-print-function ${FUNCNAME} "${@}"
local workdir=${1:-${T}/${EADA}}
local impl=${2:-${EADA}}
[[ ${workdir} ]] || die "${FUNCNAME}: no workdir specified."
[[ ${impl} ]] || die "${FUNCNAME}: no impl nor EADA specified."
if [[ ! -x ${workdir}/bin/gnatmake ]]; then
mkdir -p "${workdir}"/bin || die
local GCC GNATMAKE GNATLS GNATBIND GNATCHOP GNATPREP
ada_export "${impl}" GCC GNAT GNATMAKE GNATLS GNATCHOP GNATBIND GNATPREP
# Ada compiler
cat > "${workdir}/bin/gcc" <<-_EOF_ || die
#!/bin/sh
exec "${GCC}" "\${@}"
_EOF_
chmod a+x "${workdir}/bin/gcc" || die
cat > "${workdir}/bin/gnatmake" <<-_EOF_ || die
#!/bin/sh
exec "${GNATMAKE}" "\${@}"
_EOF_
chmod a+x "${workdir}/bin/gnatmake" || die
cat > "${workdir}/bin/gnatls" <<-_EOF_ || die
#!/bin/sh
exec "${GNATLS}" "\${@}"
_EOF_
chmod a+x "${workdir}/bin/gnatls" || die
cat > "${workdir}/bin/gnatbind" <<-_EOF_ || die
#!/bin/sh
exec "${GNATBIND}" "\${@}"
_EOF_
chmod a+x "${workdir}/bin/gnatbind" || die
cat > "${workdir}/bin/gnatchop" <<-_EOF_ || die
#!/bin/sh
exec "${GNATCHOP}" "\${@}"
_EOF_
chmod a+x "${workdir}/bin/gnatchop" || die
cat > "${workdir}/bin/gnatprep" <<-_EOF_ || die
#!/bin/sh
exec "${GNATPREP}" "\${@}"
_EOF_
chmod a+x "${workdir}/bin/gnatprep" || die
cat > "${workdir}/bin/gnat" <<-_EOF_ || die
#!/bin/sh
exec "${GNAT}" "\${@}"
_EOF_
chmod a+x "${workdir}/bin/gnat" || die
fi
# Now, set the environment.
# But note that ${workdir} may be shared with something else,
# and thus already on top of PATH.
if [[ ${PATH##:*} != ${workdir}/bin ]]; then
PATH=${workdir}/bin${PATH:+:${PATH}}
fi
export PATH
}
# @FUNCTION: ada_setup
# @DESCRIPTION:
# Determine what the selected Ada implementation is and set
# the Ada build environment up for it.
ada_setup() {
debug-print-function ${FUNCNAME} "${@}"
unset EADA
if [[ ${#_ADA_SUPPORTED_IMPLS[@]} -eq 1 ]]; then
if use "ada_target_${_ADA_SUPPORTED_IMPLS[0]}"; then
# Only one supported implementation, enable it explicitly
ada_export "${_ADA_SUPPORTED_IMPLS[0]}" EADA GCC_PV GNAT GNATBIND GNATLS GNATMAKE
ada_wrapper_setup
fi
else
local impl
for impl in "${_ADA_SUPPORTED_IMPLS[@]}"; do
if use "ada_target_${impl}"; then
if [[ ${EADA} ]]; then
eerror "Your ADA_TARGET setting lists more than a single Ada"
eerror "implementation. Please set it to just one value. If you need"
eerror "to override the value for a single package, please use package.env"
eerror "or an equivalent solution (man 5 portage)."
echo
die "More than one implementation in ADA_TARGET."
fi
ada_export "${impl}" EADA GCC_PV GNAT GNATBIND GNATLS GNATMAKE
ada_wrapper_setup
fi
done
fi
if [[ ! ${EADA} ]]; then
eerror "No Ada implementation selected for the build. Please set"
if [[ ${#_ADA_SUPPORTED_IMPLS[@]} -eq 1 ]]; then
eerror "the ADA_TARGETS variable in your make.conf to include one"
else
eerror "the ADA_SINGLE_TARGET variable in your make.conf to one"
fi
eerror "of the following values:"
eerror
eerror "${_ADA_SUPPORTED_IMPLS[@]}"
echo
die "No supported Ada implementation in ADA_SINGLE_TARGET/ADA_TARGETS."
fi
}
# @FUNCTION: ada_pkg_setup
# @DESCRIPTION:
# Runs ada_setup.
ada_pkg_setup() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${MERGE_TYPE} != binary ]] && ada_setup
}

142
eclass/alternatives.eclass Normal file

@ -0,0 +1,142 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: alternatives.eclass
# @AUTHOR:
# Original author: Alastair Tse <liquidx@gentoo.org> (03 Oct 2003)
# @BLURB: Creates symlink to the latest version of multiple slotted packages.
# @DESCRIPTION:
# When a package is SLOT'ed, very often we need to have a symlink to the
# latest version. However, depending on the order the user has merged them,
# more often than not, the symlink may be clobbered by the older versions.
#
# This eclass provides a convenience function that needs to be given a
# list of alternatives (descending order of recent-ness) and the symlink.
# It will choose the latest version it can find installed and create
# the desired symlink.
#
# There are two ways to use this eclass. The first is by declaring two variables,
# $SOURCE and $ALTERNATIVES, where $SOURCE is the symlink to be created and
# $ALTERNATIVES is a list of alternatives. The second way is to use the function
# alternatives_makesym() like the example below.
# @EXAMPLE:
# pkg_postinst() {
# alternatives_makesym "/usr/bin/python" "/usr/bin/python2.3" "/usr/bin/python2.2"
# }
#
# The above example will create a symlink at /usr/bin/python to either
# /usr/bin/python2.3 or /usr/bin/python2.2. It will choose python2.3 over
# python2.2 if both exist.
#
# Alternatively, you can use this function:
#
# pkg_postinst() {
# alternatives_auto_makesym "/usr/bin/python" "/usr/bin/python[0-9].[0-9]"
# }
#
# This will use bash pathname expansion to fill a list of alternatives it can
# link to. It is probably more robust against version upgrades. You should
# consider using this unless you want to do something special.
# @ECLASS-VARIABLE: SOURCE
# @DEFAULT_UNSET
# @DESCRIPTION:
# The symlink to be created
# @ECLASS-VARIABLE: ALTERNATIVES
# @DEFAULT_UNSET
# @DESCRIPTION:
# The list of alternatives
# @FUNCTION: alternatives_auto_makesym
# @DESCRIPTION:
# automatic deduction based on a symlink and a regex mask
alternatives_auto_makesym() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
local SYMLINK REGEX ALT myregex
SYMLINK=$1
REGEX=$2
if [ "${REGEX:0:1}" != "/" ]
then
#not an absolute path:
#inherit the root directory of our main link path for our regex search
myregex="${SYMLINK%/*}/${REGEX}"
else
myregex=${REGEX}
fi
# sort a space delimited string by converting it to a multiline list
# and then run sort -r over it.
# make sure we use ${EROOT} because otherwise stage-building will break
ALT="$(for i in $(echo ${EROOT}${myregex}); do echo ${i#${EROOT}}; done | sort -r)"
alternatives_makesym ${SYMLINK} ${ALT}
}
alternatives_makesym() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
local ALTERNATIVES=""
local SYMLINK=""
local alt pref
# usage: alternatives_makesym <resulting symlink> [alternative targets..]
# make sure it is in the prefix, allow it already to be in the prefix
SYMLINK=${EPREFIX}/${1#${EPREFIX}}
# this trick removes the trailing / from ${ROOT}
pref=${ROOT%/}
shift
ALTERNATIVES=$@
# step through given alternatives from first to last
# and if one exists, link it and finish.
for alt in ${ALTERNATIVES}; do
alt=${EPREFIX}/${alt#${EPREFIX}}
if [ -f "${pref}${alt}" ]; then
#are files in same directory?
if [ "${alt%/*}" = "${SYMLINK%/*}" ]
then
#yes; strip leading dirname from alt to create relative symlink
einfo "Linking ${alt} to ${pref}${SYMLINK} (relative)"
ln -sf ${alt##*/} ${pref}${SYMLINK}
else
#no; keep absolute path
einfo "Linking ${alt} to ${pref}${SYMLINK} (absolute)"
ln -sf ${pref}${alt} ${pref}${SYMLINK}
fi
break
fi
done
# report any errors
if [ ! -L ${pref}${SYMLINK} ]; then
ewarn "Unable to establish ${pref}${SYMLINK} symlink"
else
# we need to check for either the target being in relative path form
# or absolute path form
if [ ! -f "`dirname ${pref}${SYMLINK}`/`readlink ${pref}${SYMLINK}`" -a \
! -f "`readlink ${pref}${SYMLINK}`" ]; then
ewarn "Removing dead symlink ${pref}${SYMLINK}"
rm -f ${pref}${SYMLINK}
fi
fi
}
# @FUNCTION: alternatives_pkg_postinst
# @DESCRIPTION:
# The alternatives pkg_postinst; this function will be exported
alternatives_pkg_postinst() {
if [ -n "${ALTERNATIVES}" -a -n "${SOURCE}" ]; then
alternatives_makesym ${SOURCE} ${ALTERNATIVES}
fi
}
# @FUNCTION: alternatives_pkg_postrm
# @DESCRIPTION:
# The alternatives pkg_postrm; this function will be exported
alternatives_pkg_postrm() {
if [ -n "${ALTERNATIVES}" -a -n "${SOURCE}" ]; then
alternatives_makesym ${SOURCE} ${ALTERNATIVES}
fi
}
EXPORT_FUNCTIONS pkg_postinst pkg_postrm

167
eclass/ant-tasks.eclass Normal file

@ -0,0 +1,167 @@
# Copyright 2007-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ant-tasks.eclass
# @MAINTAINER:
# java@gentoo.org
# @AUTHOR:
# Vlastimil Babka <caster@gentoo.org>
# @SUPPORTED_EAPIS: 6 7
# @BLURB: Eclass for building dev-java/ant-* packages
# @DESCRIPTION:
# This eclass provides functionality and default ebuild variables for building
# dev-java/ant-* packages easily.
case "${EAPI:-0}" in
0|1|2|3|4|5)
die "ant-tasks.eclass: EAPI ${EAPI} is too old."
;;
6|7)
;;
*)
die "ant-tasks.eclass: EAPI ${EAPI} is not supported yet."
;;
esac
# we set ant-core dep ourselves, restricted
JAVA_ANT_DISABLE_ANT_CORE_DEP=true
# rewriting build.xml for the testcases has no reason atm
JAVA_PKG_BSFIX_ALL=no
inherit java-pkg-2 java-ant-2
[[ ${EAPI:-0} -eq 6 ]] && inherit eapi7-ver
EXPORT_FUNCTIONS src_unpack src_compile src_install
# @ECLASS-VARIABLE: ANT_TASK_JDKVER
# @DESCRIPTION:
# Affects the >=virtual/jdk version set in DEPEND string. Defaults to 1.8, can
# be overridden from ebuild BEFORE inheriting this eclass.
ANT_TASK_JDKVER=${ANT_TASK_JDKVER-1.8}
# @ECLASS-VARIABLE: ANT_TASK_JREVER
# @DESCRIPTION:
# Affects the >=virtual/jre version set in DEPEND string. Defaults to 1.8, can
# be overridden from ebuild BEFORE inheriting this eclass.
ANT_TASK_JREVER=${ANT_TASK_JREVER-1.8}
# @ECLASS-VARIABLE: ANT_TASK_NAME
# @DESCRIPTION:
# The name of this ant task as recognized by ant's build.xml, derived from $PN
# by removing the ant- prefix. Read-only.
ANT_TASK_NAME="${PN#ant-}"
# @ECLASS-VARIABLE: ANT_TASK_DEPNAME
# @DESCRIPTION:
# Specifies JAVA_PKG_NAME (PN{-SLOT} used with java-pkg_jar-from) of the package
# that this one depends on. Defaults to the name of ant task, ebuild can
# override it before inheriting this eclass. In case there is more than one
# dependency, the variable can be specified as a bash array with multiple strings,
# one for each dependency.
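#
# Example with multiple dependencies (illustrative package names), set
# before inheriting this eclass:
# @CODE
# ANT_TASK_DEPNAME=( bcel regexp )
# @CODE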
ANT_TASK_DEPNAME=${ANT_TASK_DEPNAME-${ANT_TASK_NAME}}
# @ECLASS-VARIABLE: ANT_TASK_DISABLE_VM_DEPS
# @DEFAULT_UNSET
# @DESCRIPTION:
# If set, no JDK/JRE deps are added.
# @VARIABLE: ANT_TASK_PV
# @INTERNAL
# Version of ant-core this task is intended to register and thus load with.
ANT_TASK_PV="${PV}"
# default for final releases
MY_PV=${PV}
UPSTREAM_PREFIX="mirror://apache/ant/source"
GENTOO_PREFIX="https://dev.gentoo.org/~fordfrog/distfiles"
# source/workdir name
MY_P="apache-ant-${MY_PV}"
# Default values for standard ebuild variables, can be overridden from ebuild.
DESCRIPTION="Apache Ant's optional tasks depending on ${ANT_TASK_DEPNAME}"
HOMEPAGE="http://ant.apache.org/"
SRC_URI="${UPSTREAM_PREFIX}/${MY_P}-src.tar.bz2
${GENTOO_PREFIX}/ant-${PV}-gentoo.tar.bz2"
LICENSE="Apache-2.0"
SLOT="0"
RDEPEND="~dev-java/ant-core-${PV}:0"
DEPEND="${RDEPEND}"
if [[ -z "${ANT_TASK_DISABLE_VM_DEPS}" ]]; then
RDEPEND+=" >=virtual/jre-${ANT_TASK_JREVER}"
DEPEND+=" >=virtual/jdk-${ANT_TASK_JDKVER}"
fi
# Would run the full ant test suite for every ant task
RESTRICT="test"
S="${WORKDIR}/${MY_P}"
# @FUNCTION: ant-tasks_src_unpack
# @USAGE: [ base ] [ jar-dep ] [ all ]
# @DESCRIPTION:
# The function is split into two parts; it defaults to both of them ('all').
#
# base: performs the unpack, build.xml replacement and symlinks ant.jar from
# ant-core
#
# jar-dep: symlinks the jar file(s) from dependency package(s)
ant-tasks_src_unpack() {
[[ -z "${1}" ]] && ant-tasks_src_unpack all
while [[ -n "${1}" ]]; do
case ${1} in
base)
unpack ${A}
cd "${S}"
# replace build.xml with our modified for split building
if [ -e "${WORKDIR}"/${PV}-build.patch ] ; then
eapply "${WORKDIR}"/${PV}-build.patch
else
mv -f "${WORKDIR}"/build.xml .
fi
cd lib
# remove bundled xerces
rm -f *.jar
# ant.jar to build against
java-pkg_jar-from --build-only ant-core ant.jar;;
jar-dep)
# get jar from the dependency package(s)
if [[ -n "${ANT_TASK_DEPNAME}" ]]; then
for depname in "${ANT_TASK_DEPNAME[@]}"; do
java-pkg_jar-from ${depname}
done
fi;;
all)
ant-tasks_src_unpack base jar-dep;;
esac
shift
done
}
# @FUNCTION: ant-tasks_src_compile
# @DESCRIPTION:
# Compiles the jar with installed ant-core.
ant-tasks_src_compile() {
ANT_TASKS="none" eant -Dbuild.dep=${ANT_TASK_NAME} jar-dep
}
# @FUNCTION: ant-tasks_src_install
# @DESCRIPTION:
# Installs the jar and registers its presence for the ant launcher script.
# Version param ensures it won't get loaded (thus break) when ant-core is
# updated to newer version.
ant-tasks_src_install() {
java-pkg_dojar build/lib/${PN}.jar
java-pkg_register-ant-task --version "${ANT_TASK_PV}"
# create the compatibility symlink
dodir /usr/share/ant/lib
dosym /usr/share/${PN}/lib/${PN}.jar /usr/share/ant/lib/${PN}.jar
}

714
eclass/apache-2.eclass Normal file

@ -0,0 +1,714 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: apache-2.eclass
# @MAINTAINER:
# polynomial-c@gentoo.org
# @SUPPORTED_EAPIS: 6 7
# @BLURB: Provides a common set of functions for apache-2.x ebuilds
# @DESCRIPTION:
# This eclass handles apache-2.x ebuild functions such as LoadModule generation
# and inter-module dependency checking.
inherit autotools flag-o-matic multilib ssl-cert user toolchain-funcs
[[ ${CATEGORY}/${PN} != www-servers/apache ]] \
&& die "Do not use this eclass with anything else than www-servers/apache ebuilds!"
case ${EAPI:-0} in
0|1|2|3|4|5)
die "This eclass is banned for EAPI<6"
;;
6)
inherit eapi7-ver
;;
*)
LUA_COMPAT=( lua5-{1..4} )
inherit lua-single
;;
esac
# settings which are version specific go in here:
case $(ver_cut 1-2) in
2.4)
DEFAULT_MPM_THREADED="event" #509922
;;
*)
die "Unknown MAJOR.MINOR apache version."
;;
esac
# ==============================================================================
# INTERNAL VARIABLES
# ==============================================================================
# @ECLASS-VARIABLE: GENTOO_PATCHNAME
# @DESCRIPTION:
# This internal variable contains the prefix for the patch tarball.
# Defaults to the full name and version (including revision) of the package.
# If you want to override this in an ebuild, use:
# ORIG_PR="(revision of Gentoo stuff you want)"
# GENTOO_PATCHNAME="gentoo-${PN}-${PV}${ORIG_PR:+-${ORIG_PR}}"
[[ -n "${GENTOO_PATCHNAME}" ]] || GENTOO_PATCHNAME="gentoo-${PF}"
# @ECLASS-VARIABLE: GENTOO_PATCHDIR
# @DESCRIPTION:
# This internal variable contains the working directory where patches and config
# files are located.
# Defaults to the patchset name appended to the working directory.
[[ -n "${GENTOO_PATCHDIR}" ]] || GENTOO_PATCHDIR="${WORKDIR}/${GENTOO_PATCHNAME}"
# @VARIABLE: GENTOO_DEVELOPER
# @DESCRIPTION:
# This variable needs to be set in the ebuild and contains the name of the
# gentoo developer who created the patch tarball
# @VARIABLE: GENTOO_PATCHSTAMP
# @DESCRIPTION:
# This variable needs to be set in the ebuild and contains the date the patch
# tarball was created at in YYYYMMDD format
# @VARIABLE: GENTOO_PATCH_A
# @DESCRIPTION:
# This variable should contain the entire filename of patch tarball.
# Defaults to the name of the patchset, with a datestamp.
[[ -n "${GENTOO_PATCH_A}" ]] || GENTOO_PATCH_A="${GENTOO_PATCHNAME}-${GENTOO_PATCHSTAMP}.tar.bz2"
SRC_URI="mirror://apache/httpd/httpd-${PV}.tar.bz2
https://dev.gentoo.org/~${GENTOO_DEVELOPER}/dist/apache/${GENTOO_PATCH_A}"
# @VARIABLE: IUSE_MPMS_FORK
# @DESCRIPTION:
# This variable needs to be set in the ebuild and contains a list of forking
# (i.e. non-threaded) MPMs
# @VARIABLE: IUSE_MPMS_THREAD
# @DESCRIPTION:
# This variable needs to be set in the ebuild and contains a list of threaded
# MPMs
# @VARIABLE: IUSE_MODULES
# @DESCRIPTION:
# This variable needs to be set in the ebuild and contains a list of available
# built-in modules
IUSE_MPMS="${IUSE_MPMS_FORK} ${IUSE_MPMS_THREAD}"
IUSE="${IUSE} debug doc gdbm ldap libressl selinux ssl static suexec +suexec-caps suexec-syslog split-usr threads"
for module in ${IUSE_MODULES} ; do
case ${module} in
# Enable http2 by default (bug #563452)
http2)
IUSE+=" +apache2_modules_${module}"
;;
*)
IUSE+=" apache2_modules_${module}"
;;
esac
done
_apache2_set_mpms() {
local mpm
local ompm
for mpm in ${IUSE_MPMS} ; do
IUSE="${IUSE} apache2_mpms_${mpm}"
REQUIRED_USE+=" apache2_mpms_${mpm}? ("
for ompm in ${IUSE_MPMS} ; do
if [[ "${mpm}" != "${ompm}" ]] ; then
REQUIRED_USE+=" !apache2_mpms_${ompm}"
fi
done
if has ${mpm} ${IUSE_MPMS_FORK} ; then
REQUIRED_USE+=" !threads"
else
REQUIRED_USE+=" threads"
fi
REQUIRED_USE+=" )"
done
REQUIRED_USE+=" apache2_mpms_prefork? ( !apache2_modules_http2 )"
}
_apache2_set_mpms
unset -f _apache2_set_mpms
# Dependencies
RDEPEND="
dev-lang/perl
>=dev-libs/apr-1.5.1:=
=dev-libs/apr-util-1*:=[gdbm=,ldap?]
dev-libs/libpcre
apache2_modules_brotli? ( >=app-arch/brotli-0.6.0:= )
apache2_modules_deflate? ( sys-libs/zlib )
apache2_modules_http2? (
>=net-libs/nghttp2-1.2.1
kernel_linux? ( sys-apps/util-linux )
)
apache2_modules_md? ( >=dev-libs/jansson-2.10 )
apache2_modules_mime? ( app-misc/mime-types )
apache2_modules_proxy_http2? (
>=net-libs/nghttp2-1.2.1
kernel_linux? ( sys-apps/util-linux )
)
apache2_modules_session_crypto? (
libressl? ( dev-libs/apr-util[libressl] )
!libressl? ( dev-libs/apr-util[openssl] )
)
gdbm? ( sys-libs/gdbm:= )
ldap? ( =net-nds/openldap-2* )
selinux? ( sec-policy/selinux-apache )
ssl? (
!libressl? ( >=dev-libs/openssl-1.0.2:0= )
libressl? ( dev-libs/libressl:0= )
kernel_linux? ( sys-apps/util-linux )
)
"
DEPEND="${RDEPEND}"
BDEPEND="
virtual/pkgconfig
suexec? ( suexec-caps? ( sys-libs/libcap ) )
"
if [[ ${EAPI} == 6 ]] ; then
DEPEND+=" ${BDEPEND}"
fi
PDEPEND="~app-admin/apache-tools-${PV}"
REQUIRED_USE+="
apache2_modules_http2? ( ssl )
apache2_modules_md? ( ssl )
"
S="${WORKDIR}/httpd-${PV}"
# @VARIABLE: MODULE_DEPENDS
# @DESCRIPTION:
# This variable needs to be set in the ebuild and contains a space-separated
# list of dependency tokens each with a module and the module it depends on
# separated by a colon
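# Example (illustrative): mod_dav_fs requires mod_dav, and mod_cache_disk
# requires mod_cache:
# @CODE
# MODULE_DEPENDS="dav_fs:dav cache_disk:cache"
# @CODE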
# now extend REQUIRED_USE to reflect the module dependencies to portage
_apache2_set_module_depends() {
local dep
for dep in ${MODULE_DEPENDS} ; do
REQUIRED_USE+=" apache2_modules_${dep%:*}? ( apache2_modules_${dep#*:} )"
done
}
_apache2_set_module_depends
unset -f _apache2_set_module_depends
# ==============================================================================
# INTERNAL FUNCTIONS
# ==============================================================================
# @ECLASS-VARIABLE: MY_MPM
# @DESCRIPTION:
# This internal variable contains the selected MPM after a call to setup_mpm()
# @FUNCTION: setup_mpm
# @DESCRIPTION:
# This internal function makes sure that only one of APACHE2_MPMS was selected
# or a default based on USE=threads is selected if APACHE2_MPMS is empty
setup_mpm() {
MY_MPM=""
for x in ${IUSE_MPMS} ; do
if use apache2_mpms_${x} ; then
# there can at most be one MPM selected because of REQUIRED_USE constraints
MY_MPM=${x}
elog
elog "Selected MPM: ${MY_MPM}"
elog
break
fi
done
if [[ -z "${MY_MPM}" ]] ; then
if use threads ; then
MY_MPM=${DEFAULT_MPM_THREADED}
elog
elog "Selected default threaded MPM: ${MY_MPM}"
elog
else
MY_MPM=prefork
elog
elog "Selected default MPM: ${MY_MPM}"
elog
fi
fi
}
# @VARIABLE: MODULE_CRITICAL
# @DESCRIPTION:
# This variable needs to be set in the ebuild and contains a space-separated
# list of modules critical for the default apache. A user may still
# disable these modules for custom minimal installation at their own risk.
# @FUNCTION: check_module_critical
# @DESCRIPTION:
# This internal function warns the user about modules critical for the default
# apache configuration.
check_module_critical() {
local unsupported=0
for m in ${MODULE_CRITICAL} ; do
if ! has ${m} ${MY_MODS[@]} ; then
ewarn "Module '${m}' is required in the default apache configuration."
unsupported=1
fi
done
if [[ ${unsupported} -ne 0 ]] ; then
ewarn
ewarn "You have disabled one or more required modules"
ewarn "for the default apache configuration."
ewarn "Although this is not an error, please be"
ewarn "aware that this setup is UNSUPPORTED."
ewarn
fi
}
# @ECLASS-VARIABLE: MY_CONF
# @DESCRIPTION:
# This internal variable contains the econf options for the current module
# selection after a call to setup_modules()
# @ECLASS-VARIABLE: MY_MODS
# @DESCRIPTION:
# This internal variable contains a sorted, space separated list of currently
# selected modules after a call to setup_modules()
# @FUNCTION: setup_modules
# @DESCRIPTION:
# This internal function selects all built-in modules based on USE flags and
# APACHE2_MODULES USE_EXPAND flags
setup_modules() {
local mod_type= x=
if use static ; then
mod_type="static"
else
mod_type="shared"
fi
MY_CONF=( --enable-so=static )
MY_MODS=()
if use ldap ; then
MY_CONF+=(
--enable-authnz_ldap=${mod_type}
--enable-ldap=${mod_type}
)
MY_MODS+=( ldap authnz_ldap )
else
MY_CONF+=( --disable-authnz_ldap --disable-ldap )
fi
if use ssl ; then
MY_CONF+=( --with-ssl --enable-ssl=${mod_type} )
MY_MODS+=( ssl )
else
MY_CONF+=( --without-ssl --disable-ssl )
fi
if use suexec ; then
elog "You can manipulate several configure options of suexec"
elog "through the following environment variables:"
elog
elog " SUEXEC_SAFEPATH: Default PATH for suexec (default: '${EPREFIX}/usr/local/bin:${EPREFIX}/usr/bin:${EPREFIX}/bin')"
if ! use suexec-syslog ; then
elog " SUEXEC_LOGFILE: Path to the suexec logfile (default: '${EPREFIX}/var/log/apache2/suexec_log')"
fi
elog " SUEXEC_CALLER: Name of the user Apache is running as (default: apache)"
elog " SUEXEC_DOCROOT: Directory in which suexec will run scripts (default: '${EPREFIX}/var/www')"
elog " SUEXEC_MINUID: Minimum UID, which is allowed to run scripts via suexec (default: 1000)"
elog " SUEXEC_MINGID: Minimum GID, which is allowed to run scripts via suexec (default: 100)"
elog " SUEXEC_USERDIR: User subdirectories (like /home/user/html) (default: public_html)"
elog " SUEXEC_UMASK: Umask for the suexec process (default: 077)"
elog
MY_CONF+=( --with-suexec-safepath="${SUEXEC_SAFEPATH:-${EPREFIX}/usr/local/bin:${EPREFIX}/usr/bin:${EPREFIX}/bin}" )
MY_CONF+=( $(use_with !suexec-syslog suexec-logfile "${SUEXEC_LOGFILE:-${EPREFIX}/var/log/apache2/suexec_log}") )
MY_CONF+=( $(use_with suexec-syslog) )
if use suexec-syslog && use suexec-caps ; then
MY_CONF+=( --enable-suexec-capabilities )
fi
MY_CONF+=( --with-suexec-bin="${EPREFIX}/usr/sbin/suexec" )
MY_CONF+=( --with-suexec-userdir=${SUEXEC_USERDIR:-public_html} )
MY_CONF+=( --with-suexec-caller=${SUEXEC_CALLER:-apache} )
MY_CONF+=( --with-suexec-docroot="${SUEXEC_DOCROOT:-${EPREFIX}/var/www}" )
MY_CONF+=( --with-suexec-uidmin=${SUEXEC_MINUID:-1000} )
MY_CONF+=( --with-suexec-gidmin=${SUEXEC_MINGID:-100} )
MY_CONF+=( --with-suexec-umask=${SUEXEC_UMASK:-077} )
MY_CONF+=( --enable-suexec=${mod_type} )
MY_MODS+=( suexec )
else
MY_CONF+=( --disable-suexec )
fi
for x in ${IUSE_MODULES} ; do
if use apache2_modules_${x} ; then
MY_CONF+=( --enable-${x}=${mod_type} )
MY_MODS+=( ${x} )
else
MY_CONF+=( --disable-${x} )
fi
done
# sort and uniquify MY_MODS
MY_MODS=( $(echo ${MY_MODS[@]} | tr ' ' '\n' | sort -u) )
check_module_critical
}
# @VARIABLE: MODULE_DEFINES
# @DESCRIPTION:
# This variable needs to be set in the ebuild and contains a space-separated
# list of tokens each mapping a module to a runtime define which can be
# specified in APACHE2_OPTS in /etc/conf.d/apache2 to enable this particular
# module.
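# Example (illustrative): guard mod_info and mod_ssl behind -D INFO and
# -D SSL respectively:
# @CODE
# MODULE_DEFINES="info:INFO ssl:SSL"
# @CODE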
# @FUNCTION: generate_load_module
# @DESCRIPTION:
# This internal function generates the LoadModule lines for httpd.conf based on
# the current module selection and MODULE_DEFINES
generate_load_module() {
local def= endit=0 m= mod_lines= mod_dir="${ED%/}/usr/$(get_libdir)/apache2/modules"
if use static; then
sed -i -e "/%%LOAD_MODULE%%/d" \
"${GENTOO_PATCHDIR}"/conf/httpd.conf
return
fi
for m in ${MY_MODS[@]} ; do
if [[ -e "${mod_dir}/mod_${m}.so" ]] ; then
for def in ${MODULE_DEFINES} ; do
if [[ "${m}" == "${def%:*}" ]] ; then
mod_lines="${mod_lines}\n<IfDefine ${def#*:}>"
endit=1
fi
done
mod_lines="${mod_lines}\nLoadModule ${m}_module modules/mod_${m}.so"
if [[ ${endit} -ne 0 ]] ; then
mod_lines="${mod_lines}\n</IfDefine>"
endit=0
fi
fi
done
sed -i -e "s:%%LOAD_MODULE%%:${mod_lines}:" \
"${GENTOO_PATCHDIR}"/conf/httpd.conf
}
# @FUNCTION: check_upgrade
# @DESCRIPTION:
# This internal function checks if the previous configuration file for built-in
# modules exists in ROOT and prevents upgrade in this case. Users are supposed
# to convert this file to the new APACHE2_MODULES USE_EXPAND variable and remove
# it afterwards.
check_upgrade() {
if [[ -e "${EROOT}"etc/apache2/apache2-builtin-mods ]]; then
eerror "The previous configuration file for built-in modules"
eerror "(${EROOT}etc/apache2/apache2-builtin-mods) exists on your"
eerror "system."
eerror
eerror "Please read https://wiki.gentoo.org/wiki/Project:Apache/Upgrading"
eerror "for detailed information how to convert this file to the new"
eerror "APACHE2_MODULES USE_EXPAND variable."
eerror
die "upgrade not possible with existing ${ROOT}etc/apache2/apache2-builtin-mods"
fi
}
# ==============================================================================
# EXPORTED FUNCTIONS
# ==============================================================================
# @FUNCTION: apache-2_pkg_setup
# @DESCRIPTION:
# This function selects built-in modules, the MPM and other configure options,
# creates the apache user and group and informs about CONFIG_SYSVIPC being
# needed (we don't depend on kernel sources and therefore cannot check).
apache-2_pkg_setup() {
check_upgrade
# setup apache user and group
enewgroup apache 81
enewuser apache 81 -1 /var/www apache
setup_mpm
setup_modules
if use debug; then
MY_CONF+=( --enable-exception-hook )
fi
elog "Please note that you need SysV IPC support in your kernel."
elog "Make sure CONFIG_SYSVIPC=y is set."
elog
if use userland_BSD; then
elog "On BSD systems you need to add the following line to /boot/loader.conf:"
elog " accf_http_load=\"YES\""
if use ssl ; then
elog " accf_data_load=\"YES\""
fi
elog
fi
if [[ ${EAPI} != 6 ]] && use apache2_modules_lua ; then
lua-single_pkg_setup
fi
}
# @FUNCTION: apache-2_src_prepare
# @DESCRIPTION:
# This function applies patches, configures a custom file-system layout and
# rebuilds the configure scripts.
apache-2_src_prepare() {
#fix prefix in conf files etc (bug #433736)
use !prefix || sed -e "s@/\(usr\|var\|etc\|run\)/@${EPREFIX}&@g" \
-i "${GENTOO_PATCHDIR}"/conf/httpd.conf "${GENTOO_PATCHDIR}"/scripts/* \
"${GENTOO_PATCHDIR}"/docs/*.example "${GENTOO_PATCHDIR}"/patches/*.layout \
"${GENTOO_PATCHDIR}"/init/* "${GENTOO_PATCHDIR}"/conf/vhosts.d/* \
"${GENTOO_PATCHDIR}"/conf/modules.d/* || die
# 03_all_gentoo-apache-tools.patch injects -Wl,-z,now, which is not a good
# idea for everyone
case ${CHOST} in
*-linux-gnu|*-solaris*|*-freebsd*)
# do nothing, these use GNU binutils
:
;;
*-darwin*)
sed -i -e 's/-Wl,-z,now/-Wl,-bind_at_load/g' \
"${GENTOO_PATCHDIR}"/patches/03_all_gentoo_apache-tools.patch \
|| die
;;
*)
# patch it out to be like upstream
sed -i -e 's/-Wl,-z,now//g' \
"${GENTOO_PATCHDIR}"/patches/03_all_gentoo_apache-tools.patch \
|| die
;;
esac
# Use correct multilib libdir in gentoo patches
sed -i -e "s:/usr/lib:/usr/$(get_libdir):g" \
"${GENTOO_PATCHDIR}"/{conf/httpd.conf,init/*,patches/config.layout} \
|| die "libdir sed failed"
eapply "${GENTOO_PATCHDIR}"/patches/*.patch
default
# Don't rename configure.in _before_ any possible user patches!
if [[ -f "configure.in" ]] ; then
elog "Renaming configure.in to configure.ac"
mv configure.{in,ac} || die
fi
# setup the filesystem layout config
cat "${GENTOO_PATCHDIR}"/patches/config.layout >> "${S}"/config.layout || \
die "Failed preparing config.layout!"
sed -i -e "s:version:${PF}:g" "${S}"/config.layout || die
# apache2.8 instead of httpd.8 (bug #194828)
mv docs/man/{httpd,apache2}.8 || die
sed -i -e 's/httpd\.8/apache2.8/g' Makefile.in || die
# patched-in MPMs need the build environment rebuilt
sed -i -e '/sinclude/d' configure.ac || die
AT_M4DIR=build eautoreconf
# ${T} must be not group-writable, else grsec TPE will block it
chmod g-w "${T}" || die
# This package really should upgrade to using pcre's .pc file.
cat <<-\EOF >"${T}"/pcre-config
#!/bin/bash
flags=()
for flag; do
if [[ ${flag} == "--version" ]]; then
flags+=( --modversion )
else
flags+=( "${flag}" )
fi
done
exec ${PKG_CONFIG} libpcre "${flags[@]}"
EOF
chmod a+x "${T}"/pcre-config || die
}
# @FUNCTION: apache-2_src_configure
# @DESCRIPTION:
# This function adds compiler flags and runs econf and emake based on MY_MPM and
# MY_CONF
apache-2_src_configure() {
tc-export PKG_CONFIG
# Sanity check in case people have bad mounts/TPE settings. #500928
if ! "${T}"/pcre-config --help >/dev/null ; then
eerror "Could not execute ${T}/pcre-config; do you have bad mount"
eerror "permissions in ${T} or have TPE turned on in your kernel?"
die "check your runtime settings #500928"
fi
# Instead of filtering --as-needed (bug #128505), append --no-as-needed
# Thanks to Harald van Dijk
append-ldflags $(no-as-needed)
# peruser MPM debugging with -X is nearly impossible
if has peruser ${IUSE_MPMS} && use apache2_mpms_peruser ; then
use debug && append-flags -DMPM_PERUSER_DEBUG
fi
# econf overwrites the paths from config.layout, so we have to put them into
# our MY_CONF array too
MY_CONF+=(
--includedir="${EPREFIX}"/usr/include/apache2
--libexecdir="${EPREFIX}"/usr/$(get_libdir)/apache2/modules
--datadir="${EPREFIX}"/var/www/localhost
--sysconfdir="${EPREFIX}"/etc/apache2
--localstatedir="${EPREFIX}"/var
--with-mpm=${MY_MPM}
--with-apr="${SYSROOT}${EPREFIX}"/usr
--with-apr-util="${SYSROOT}${EPREFIX}"/usr
--with-pcre="${T}"/pcre-config
--with-z="${EPREFIX}"/usr
--with-port=80
--with-program-name=apache2
--enable-layout=Gentoo
)
ac_cv_path_PKGCONFIG=${PKG_CONFIG} \
econf "${MY_CONF[@]}"
sed -i -e 's:apache2\.conf:httpd.conf:' include/ap_config_auto.h || die
}
# @FUNCTION: apache-2_src_install
# @DESCRIPTION:
# This function runs `emake install' and generates, installs and adapts
# the Gentoo-specific configuration files found in the tarball
apache-2_src_install() {
emake DESTDIR="${D}" MKINSTALLDIRS="mkdir -p" install
# install our configuration files
keepdir /etc/apache2/vhosts.d
keepdir /etc/apache2/modules.d
generate_load_module
insinto /etc/apache2
doins -r "${GENTOO_PATCHDIR}"/conf/*
use apache2_modules_mime_magic && doins docs/conf/magic
insinto /etc/logrotate.d
newins "${GENTOO_PATCHDIR}"/scripts/apache2-logrotate apache2
# generate a sane default APACHE2_OPTS
APACHE2_OPTS="-D DEFAULT_VHOST -D INFO"
use doc && APACHE2_OPTS+=" -D MANUAL"
use ssl && APACHE2_OPTS+=" -D SSL -D SSL_DEFAULT_VHOST"
use suexec && APACHE2_OPTS+=" -D SUEXEC"
if has negotiation ${APACHE2_MODULES} && use apache2_modules_negotiation; then
APACHE2_OPTS+=" -D LANGUAGE"
fi
sed -i -e "s:APACHE2_OPTS=\".*\":APACHE2_OPTS=\"${APACHE2_OPTS}\":" \
"${GENTOO_PATCHDIR}"/init/apache2.confd || die
newconfd "${GENTOO_PATCHDIR}"/init/apache2.confd apache2
newinitd "${GENTOO_PATCHDIR}"/init/apache2.initd apache2
# install apache2ctl wrapper for our init script if available
if test -e "${GENTOO_PATCHDIR}"/scripts/apache2ctl; then
exeinto /usr/sbin
doexe "${GENTOO_PATCHDIR}"/scripts/apache2ctl
else
dosym /etc/init.d/apache2 /usr/sbin/apache2ctl
fi
# provide legacy symlink for apxs, bug 177697
dosym apxs /usr/sbin/apxs2
# install some documentation
dodoc ABOUT_APACHE CHANGES LAYOUT README README.platforms VERSIONING
dodoc "${GENTOO_PATCHDIR}"/docs/*
# drop in a convenient link to the manual
if use doc ; then
sed -i -e "s:VERSION:${PVR}:" \
"${ED%/}/etc/apache2/modules.d/00_apache_manual.conf" \
|| die
docompress -x /usr/share/doc/${PF}/manual # 503640
else
rm -f "${ED%/}/etc/apache2/modules.d/00_apache_manual.conf" \
|| die
rm -Rf "${ED%/}/usr/share/doc/${PF}/manual" || die
fi
# the default icons and error pages get stored in
# /usr/share/apache2/{error,icons}
dodir /usr/share/apache2
mv -f "${ED%/}/var/www/localhost/error" \
"${ED%/}/usr/share/apache2/error" || die
mv -f "${ED%/}/var/www/localhost/icons" \
"${ED%/}/usr/share/apache2/icons" || die
rm -rf "${ED%/}/var/www/localhost/" || die
eend $?
# set some sane permissions for suexec
if use suexec ; then
if ! use suexec-syslog || ! use suexec-caps ; then
fowners 0:${SUEXEC_CALLER:-apache} /usr/sbin/suexec
fperms 4710 /usr/sbin/suexec
# provide legacy symlink for suexec, bug 177697
dosym /usr/sbin/suexec /usr/sbin/suexec2
fi
fi
# empty dirs
local i
for i in /var/lib/dav /var/log/apache2 /var/cache/apache2 ; do
keepdir ${i}
fowners apache:apache ${i}
fperms 0750 ${i}
done
}
# @FUNCTION: apache-2_pkg_postinst
# @DESCRIPTION:
# This function creates test certificates if SSL is enabled and installs the
# default index.html to /var/www/localhost if it does not exist. We do this here
# because the default webroot is a copy of the files that exist elsewhere and we
# don't want them to be managed/removed by portage when apache is upgraded.
apache-2_pkg_postinst() {
if use ssl && [[ ! -e "${EROOT}/etc/ssl/apache2/server.pem" ]]; then
SSL_ORGANIZATION="${SSL_ORGANIZATION:-Apache HTTP Server}"
install_cert /etc/ssl/apache2/server
ewarn
ewarn "The location of SSL certificates has changed. If you are"
ewarn "upgrading from ${CATEGORY}/${PN}-2.2.13 or earlier (or remerged"
ewarn "*any* apache version), you might want to move your old"
ewarn "certificates from /etc/apache2/ssl/ to /etc/ssl/apache2/ and"
ewarn "update your config files."
ewarn
fi
if [[ ! -e "${EROOT}/var/www/localhost" ]] ; then
mkdir -p "${EROOT}/var/www/localhost/htdocs"
echo "<html><body><h1>It works!</h1></body></html>" > "${EROOT}/var/www/localhost/htdocs/index.html"
fi
echo
elog "Attention: cgi and cgid modules are now handled via APACHE2_MODULES flags"
elog "in make.conf. Make sure to enable those in order to compile them."
elog "In general, you should use 'cgid' with threaded MPMs and 'cgi' otherwise."
echo
}
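# A hedged make.conf sketch for the cgi/cgid note in pkg_postinst above
# (the MPM and module selection are purely illustrative):
# @CODE
# APACHE2_MPMS="event"
# APACHE2_MODULES="alias authz_core dir mime unixd cgid"
# @CODE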
EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_install pkg_postinst

238
eclass/apache-module.eclass Normal file
View File

@ -0,0 +1,238 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: apache-module.eclass
# @MAINTAINER:
# apache-devs@gentoo.org
# @BLURB: Provides a common set of functions for apache modules
# @DESCRIPTION:
# This eclass handles apache modules in a sane way.
#
# To make use of this eclass simply call one of the need/want_apache functions
# described in depend.apache.eclass. Make sure you use the need/want_apache call
# after you have defined DEPEND and RDEPEND. Also note that you can not rely on
# the automatic RDEPEND=DEPEND that portage does if you use this eclass.
#
# See Bug 107127 for more information.
#
# @EXAMPLE:
#
# Here is a simple example of an ebuild for mod_foo:
#
# @CODE
# APACHE2_MOD_CONF="42_mod_foo"
# APACHE2_MOD_DEFINE="FOO"
# need_apache2
# @CODE
#
# A more complicated example for a module with non-standard locations:
#
# @CODE
# APXS2_S="${S}/apache22/src"
# APACHE2_MOD_FILE="${APXS2_S}/${PN}.so"
# APACHE2_MOD_CONF="42_${PN}"
# APACHE2_MOD_DEFINE="FOO"
# DOCFILES="docs/*.html"
# need_apache2_2
# @CODE
#
# A basic module configuration which just loads the module into apache:
#
# @CODE
# <IfDefine FOO>
# LoadModule foo_module modules/mod_foo.so
# </IfDefine>
# @CODE
inherit depend.apache
# ==============================================================================
# PUBLIC VARIABLES
# ==============================================================================
# @VARIABLE: APXS2_S
# @DESCRIPTION:
# Path to temporary build directory. (Defaults to `${S}/src' if it exists,
# `${S}' otherwise)
# @VARIABLE: APXS2_ARGS
# @DESCRIPTION:
# Arguments to pass to the apxs tool. (Defaults to `-c ${PN}.c')
# @VARIABLE: APACHE2_EXECFILES
# @DESCRIPTION:
# List of files that will be installed into ${APACHE_MODULESDIR} beside
# ${APACHE2_MOD_FILE}. In addition, the exported src_install function sets
# the executable permission on those files.
# @VARIABLE: APACHE2_MOD_CONF
# @DESCRIPTION:
# Module configuration file installed by src_install (minus the .conf suffix and
# relative to ${FILESDIR}).
# @VARIABLE: APACHE2_MOD_DEFINE
# @DESCRIPTION:
# Name of define (e.g. FOO) to use in conditional loading of the installed
# module/its config file, multiple defines should be space separated.
# @VARIABLE: APACHE2_MOD_FILE
# @DESCRIPTION:
# Name of the module file that src_install installs. (Defaults
# to `${APXS2_S}/.libs/${PN}.so')
# @VARIABLE: APACHE2_VHOST_CONF
# @DESCRIPTION:
# Virtual host configuration file installed by src_install (minus the .conf
# suffix and relative to ${FILESDIR}).
# @VARIABLE: DOCFILES
# @DESCRIPTION:
# If the exported src_install() is being used, and ${DOCFILES} is non-empty, some
# sed-fu is applied to split out html documentation (if any) from normal
# documentation, and dodoc'd or dohtml'd.
# ==============================================================================
# INTERNAL FUNCTIONS
# ==============================================================================
# Internal function to construct the default ${APXS2_S} path if required.
apache_cd_dir() {
debug-print-function $FUNCNAME $*
local CD_DIR="${APXS2_S}"
if [[ -z "${CD_DIR}" ]] ; then
if [[ -d "${S}/src" ]] ; then
CD_DIR="${S}/src"
else
CD_DIR="${S}"
fi
fi
debug-print $FUNCNAME "CD_DIR=${CD_DIR}"
echo "${CD_DIR}"
}
# Internal function to construct the default ${APACHE2_MOD_FILE} if required.
apache_mod_file() {
debug-print-function $FUNCNAME $*
local MOD_FILE="${APACHE2_MOD_FILE:-$(apache_cd_dir)/.libs/${PN}.so}"
debug-print $FUNCNAME "MOD_FILE=${MOD_FILE}"
echo "${MOD_FILE}"
}
# Internal function for picking out html files from ${DOCFILES}. It takes an
# optional first argument `html'; if the first argument equals `html', only
# html files are returned, otherwise normal (non-html) docs are returned.
apache_doc_magic() {
debug-print-function $FUNCNAME $*
local DOCS=
if [[ -n "${DOCFILES}" ]] ; then
if [[ "x$1" == "xhtml" ]] ; then
DOCS="`echo ${DOCFILES} | sed -e 's/ /\n/g' | sed -e '/^[^ ]*.html$/ !d'`"
else
DOCS="`echo ${DOCFILES} | sed 's, *[^ ]*\+.html, ,g'`"
fi
fi
debug-print $FUNCNAME "DOCS=${DOCS}"
echo "${DOCS}"
}
# ==============================================================================
# EXPORTED FUNCTIONS
# ==============================================================================
# @FUNCTION: apache-module_src_compile
# @DESCRIPTION:
# The default action is to call ${APXS} with the value of ${APXS2_ARGS}. If a
# module requires a different build setup than this, use ${APXS} in your own
# src_compile routine.
apache-module_src_compile() {
debug-print-function $FUNCNAME $*
local CD_DIR=$(apache_cd_dir)
cd "${CD_DIR}" || die "cd ${CD_DIR} failed"
APXS2_ARGS="${APXS2_ARGS:--c ${PN}.c}"
${APXS} ${APXS2_ARGS} || die "${APXS} ${APXS2_ARGS} failed"
}
# @FUNCTION: apache-module_src_install
# @DESCRIPTION:
# This installs the files into apache's directories. The module is installed
# from a directory chosen as above (apache_cd_dir). In addition, this function
# can also set the executable permission on files listed in
# ${APACHE2_EXECFILES}. The configuration file name is listed in
# ${APACHE2_MOD_CONF} without the .conf extension, so if your configuration is
# 55_mod_foo.conf, APACHE2_MOD_CONF would be 55_mod_foo. ${DOCFILES} contains
# the list of files you want filed as documentation.
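# Example (derived from the behaviour of the code below): with
# APACHE2_MOD_CONF="55_mod_foo", ${FILESDIR}/55_mod_foo.conf is installed as
# 55_mod_foo.conf; with APACHE2_MOD_CONF="55_mod_foo 55_foo" it is installed
# as 55_foo.conf.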
apache-module_src_install() {
debug-print-function $FUNCNAME $*
local CD_DIR=$(apache_cd_dir)
pushd "${CD_DIR}" >/dev/null || die "cd ${CD_DIR} failed"
local MOD_FILE=$(apache_mod_file)
exeinto "${APACHE_MODULESDIR}"
doexe ${MOD_FILE} || die "internal ebuild error: '${MOD_FILE}' not found"
[[ -n "${APACHE2_EXECFILES}" ]] && doexe ${APACHE2_EXECFILES}
if [[ -n "${APACHE2_MOD_CONF}" ]] ; then
insinto "${APACHE_MODULES_CONFDIR}"
set -- ${APACHE2_MOD_CONF}
newins "${FILESDIR}/${1}.conf" "$(basename ${2:-$1}).conf" \
|| die "internal ebuild error: '${FILESDIR}/${1}.conf' not found"
fi
if [[ -n "${APACHE2_VHOST_CONF}" ]] ; then
insinto "${APACHE_VHOSTS_CONFDIR}"
set -- ${APACHE2_VHOST_CONF}
newins "${FILESDIR}/${1}.conf" "$(basename ${2:-$1}).conf " \
|| die "internal ebuild error: '${FILESDIR}/${1}.conf' not found"
fi
cd "${S}"
if [[ -n "${DOCFILES}" ]] ; then
local OTHER_DOCS=$(apache_doc_magic)
local HTML_DOCS=$(apache_doc_magic html)
[[ -n "${OTHER_DOCS}" ]] && dodoc ${OTHER_DOCS}
[[ -n "${HTML_DOCS}" ]] && dohtml ${HTML_DOCS}
fi
popd >/dev/null
}
# @FUNCTION: apache-module_pkg_postinst
# @DESCRIPTION:
# This prints out information about the installed module and how to enable it.
apache-module_pkg_postinst() {
debug-print-function $FUNCNAME $*
if [[ -n "${APACHE2_MOD_DEFINE}" ]] ; then
local my_opts="-D ${APACHE2_MOD_DEFINE// / -D }"
einfo
einfo "To enable ${PN}, you need to edit your /etc/conf.d/apache2 file and"
einfo "add '${my_opts}' to APACHE2_OPTS."
einfo
fi
if [[ -n "${APACHE2_MOD_CONF}" ]] ; then
set -- ${APACHE2_MOD_CONF}
einfo
einfo "Configuration file installed as"
einfo " ${APACHE_MODULES_CONFDIR}/$(basename ${2:-$1}).conf"
einfo "You may want to edit it before turning the module on in /etc/conf.d/apache2"
einfo
fi
}
EXPORT_FUNCTIONS src_compile src_install pkg_postinst

90
eclass/aspell-dict-r1.eclass Normal file
View File

@ -0,0 +1,90 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: aspell-dict-r1.eclass
# @MAINTAINER:
# maintainer-needed@gentoo.org
# @AUTHOR:
# Original author: Seemant Kulleen
# -r1 author: David Seifert
# @SUPPORTED_EAPIS: 6
# @BLURB: An eclass to streamline the construction of ebuilds for new aspell dicts
# @DESCRIPTION:
# The aspell-dict-r1 eclass is designed to streamline the construction of
# ebuilds for the new aspell dictionaries (from gnu.org) which support
# aspell-0.50. Support for aspell-0.60 has been added by Sergey Ulanov.
# @ECLASS-VARIABLE: ASPELL_LANG
# @REQUIRED
# @DESCRIPTION:
# Pure cleartext string that is included in DESCRIPTION. This is the name
# of the language, for instance "Hungarian". Needs to be defined before
# inheriting the eclass.
# @ECLASS-VARIABLE: ASPELL_VERSION
# @DESCRIPTION:
# What major version of aspell is this dictionary for? Valid values are 5, 6 or undefined.
# This value is used to construct SRC_URI and *DEPEND strings. If defined to 6,
# >=app-text/aspell-0.60 will be added to DEPEND and RDEPEND, otherwise,
# >=app-text/aspell-0.50 is added to DEPEND and RDEPEND. If the value is to be overridden,
# it needs to be overridden before inheriting the eclass.
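# Example (a hedged sketch of a hypothetical app-dicts/aspell-hu ebuild; the
# eclass itself provides DESCRIPTION, SRC_URI, SLOT and the aspell dependency):
# @CODE
# EAPI=6
#
# ASPELL_LANG="Hungarian"
# ASPELL_VERSION=6
# inherit aspell-dict-r1
#
# LICENSE="GPL-2"
# KEYWORDS="~amd64 ~x86"
# @CODE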
case ${EAPI:-0} in
[0-5])
die "aspell-dict-r1.eclass is banned in EAPI ${EAPI:-0}"
;;
6)
;;
*)
die "Unknown EAPI ${EAPI:-0}"
;;
esac
EXPORT_FUNCTIONS src_configure src_install
if [[ ! ${_ASPELL_DICT_R1} ]]; then
# aspell packages have an idiosyncratic versioning scheme: the last
# version separator is replaced by a '-'.
_ASPELL_P=aspell${ASPELL_VERSION}-${PN/aspell-/}-${PV%.*}-${PV##*.}
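# For example, PN=aspell-hu and PV=1.6.0 with ASPELL_VERSION=6 yield
# _ASPELL_P=aspell6-hu-1.6-0 (an illustrative combination).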
# @ECLASS-VARIABLE: ASPELL_SPELLANG
# @DESCRIPTION:
# Short (readonly) form of the language code, generated from ${PN}.
# For instance, 'aspell-hu' yields the value 'hu'.
readonly ASPELL_SPELLANG=${PN/aspell-/}
S="${WORKDIR}/${_ASPELL_P}"
DESCRIPTION="${ASPELL_LANG} language dictionary for aspell"
HOMEPAGE="http://aspell.net"
SRC_URI="mirror://gnu/aspell/dict/${ASPELL_SPELLANG}/${_ASPELL_P}.tar.bz2"
unset _ASPELL_P
IUSE=""
SLOT="0"
_ASPELL_MAJOR_VERSION=${ASPELL_VERSION:-5}
[[ ${_ASPELL_MAJOR_VERSION} != [56] ]] && die "${ASPELL_VERSION} is not a valid version"
RDEPEND=">=app-text/aspell-0.${_ASPELL_MAJOR_VERSION}0"
DEPEND="${RDEPEND}"
unset _ASPELL_MAJOR_VERSION
# @FUNCTION: aspell-dict-r1_src_configure
# @DESCRIPTION:
# The aspell-dict-r1 src_configure function which is exported.
aspell-dict-r1_src_configure() {
# non-autoconf based script, cannot be used with econf
./configure || die
}
# @FUNCTION: aspell-dict-r1_src_install
# @DESCRIPTION:
# The aspell-dict-r1 src_install function which is exported.
aspell-dict-r1_src_install() {
default
[[ -s info ]] && dodoc info
}
_ASPELL_DICT_R1=1
fi

92
eclass/autotools-multilib.eclass Normal file
View File

@ -0,0 +1,92 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: autotools-multilib.eclass
# @MAINTAINER:
# gx86-multilib team <multilib@gentoo.org>
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 4 5
# @BLURB: autotools-utils wrapper for multilib builds
# @DEPRECATED: multilib-minimal
# @DESCRIPTION:
# The autotools-multilib.eclass provides a glue between
# autotools-utils.eclass(5) and multilib-minimal.eclass(5), aiming
# to provide a convenient way to build packages using autotools
# for multiple ABIs.
#
# Inheriting this eclass sets IUSE and exports default multilib_src_*()
# sub-phases that call autotools-utils phase functions for each enabled
# ABI. The multilib_src_*() functions can be defined in the ebuild just
# like in multilib-minimal.
# EAPI=4 is required for meaningful MULTILIB_USEDEP.
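# Example (a hedged sketch; the dependency and USE flags are illustrative):
# @CODE
# EAPI=5
# inherit autotools-multilib
#
# RDEPEND="dev-libs/libfoo[${MULTILIB_USEDEP}]"
#
# src_configure() {
#     local myeconfargs=( $(use_enable debug) )
#     autotools-multilib_src_configure
# }
# @CODE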
case ${EAPI:-0} in
6) die "${ECLASS}.eclass is banned in EAPI ${EAPI}";;
4|5) ;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
inherit autotools-utils eutils ltprune multilib-build multilib-minimal
EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
# Note: _at_args[@] passing is a backwards compatibility measure.
# Don't use it in new packages.
autotools-multilib_src_prepare() {
autotools-utils_src_prepare "${@}"
[[ ${AUTOTOOLS_IN_SOURCE_BUILD} ]] && multilib_copy_sources
}
multilib_src_configure() {
[[ ${AUTOTOOLS_IN_SOURCE_BUILD} ]] && local ECONF_SOURCE=${BUILD_DIR}
autotools-utils_src_configure "${_at_args[@]}"
}
autotools-multilib_src_configure() {
local _at_args=( "${@}" )
multilib-minimal_src_configure
}
multilib_src_compile() {
emake "${_at_args[@]}"
}
autotools-multilib_src_compile() {
local _at_args=( "${@}" )
multilib-minimal_src_compile
}
multilib_src_test() {
autotools-utils_src_test "${_at_args[@]}"
}
autotools-multilib_src_test() {
local _at_args=( "${@}" )
multilib-minimal_src_test
}
multilib_src_install() {
emake DESTDIR="${D}" "${_at_args[@]}" install
}
multilib_src_install_all() {
einstalldocs
# Remove libtool files and unnecessary static libs
local prune_ltfiles=${AUTOTOOLS_PRUNE_LIBTOOL_FILES}
if [[ ${prune_ltfiles} != none ]]; then
prune_libtool_files ${prune_ltfiles:+--${prune_ltfiles}}
fi
}
autotools-multilib_src_install() {
local _at_args=( "${@}" )
multilib-minimal_src_install
}

385
eclass/autotools-utils.eclass Normal file
View File

@ -0,0 +1,385 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: autotools-utils.eclass
# @MAINTAINER:
# Maciej Mrozowski <reavertm@gentoo.org>
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 4 5
# @BLURB: common ebuild functions for autotools-based packages
# @DEPRECATED: out-of-source
# @DESCRIPTION:
# autotools-utils.eclass is a wrapper around autotools.eclass(5) and
# base.eclass(5) providing all inherited features along with econf
# arguments as a Bash array, out-of-source builds with an overridable
# build dir location, static archive handling and libtool file removal.
#
# Please note that autotools-utils does not support mixing of its phase
# functions with regular econf/emake calls. If necessary, please call
# autotools-utils_src_compile instead of the latter.
#
# @EXAMPLE:
# Typical ebuild using autotools-utils.eclass:
#
# @CODE
# EAPI="2"
#
# inherit autotools-utils
#
# DESCRIPTION="Foo bar application"
# HOMEPAGE="http://example.org/foo/"
# SRC_URI="mirror://sourceforge/foo/${P}.tar.bz2"
#
# LICENSE="LGPL-2.1"
# KEYWORDS=""
# SLOT="0"
# IUSE="debug doc examples qt4 static-libs tiff"
#
# CDEPEND="
# media-libs/libpng:0
# qt4? (
# dev-qt/qtcore:4
# dev-qt/qtgui:4
# )
# tiff? ( media-libs/tiff:0 )
# "
# RDEPEND="${CDEPEND}
# !media-gfx/bar
# "
# DEPEND="${CDEPEND}
# doc? ( app-doc/doxygen )
# "
#
# # bug 123456
# AUTOTOOLS_IN_SOURCE_BUILD=1
#
# DOCS=(AUTHORS ChangeLog README "Read me.txt" TODO)
#
# PATCHES=(
# "${FILESDIR}/${P}-gcc44.patch" # bug 123458
# "${FILESDIR}/${P}-as-needed.patch"
# "${FILESDIR}/${P}-unbundle_libpng.patch"
# )
#
# src_configure() {
# local myeconfargs=(
# $(use_enable debug)
# $(use_with qt4)
# $(use_enable threads multithreading)
# $(use_with tiff)
# )
# autotools-utils_src_configure
# }
#
# src_compile() {
# autotools-utils_src_compile
# use doc && autotools-utils_src_compile docs
# }
#
# src_install() {
# use doc && HTML_DOCS=("${BUILD_DIR}/apidocs/html/")
# autotools-utils_src_install
# if use examples; then
# dobin "${BUILD_DIR}"/foo_example{1,2,3} \\
# || die 'dobin examples failed'
# fi
# }
#
# @CODE
# Keep variable names synced with cmake-utils and the other way around!
case ${EAPI:-0} in
6) die "${ECLASS}.eclass is banned in EAPI ${EAPI}";;
4|5) ;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
# @ECLASS-VARIABLE: AUTOTOOLS_AUTORECONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set to a non-empty value before calling inherit to enable running autoreconf
# in src_prepare() and adding autotools dependencies.
#
# This is usually necessary when using live sources or applying patches
# modifying configure.ac or Makefile.am files. Note that in the latter case
# setting this variable is obligatory even though the eclass will work without
# it (to add the necessary dependencies).
#
# The eclass will try to determine the correct autotools to run including a few
# external tools: gettext, glib-gettext, intltool, gtk-doc, gnome-doc-prepare.
# If your tool is not supported, please open a bug and we'll add support for it.
#
# Note that dependencies are added for autoconf, automake and libtool only.
# If your package needs one of the external tools listed above, you need to add
# appropriate packages to DEPEND yourself.
[[ ${AUTOTOOLS_AUTORECONF} ]] || : ${AUTOTOOLS_AUTO_DEPEND:=no}
# eutils for eqawarn, path_exists
inherit autotools epatch eutils libtool ltprune
EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install src_test
# @ECLASS-VARIABLE: BUILD_DIR
# @DEFAULT_UNSET
# @DESCRIPTION:
# Build directory, location where all autotools generated files should be
# placed. For out of source builds it defaults to ${WORKDIR}/${P}_build.
#
# This variable has been called AUTOTOOLS_BUILD_DIR formerly.
# It is set under that name for compatibility.
# @ECLASS-VARIABLE: AUTOTOOLS_IN_SOURCE_BUILD
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set to enable in-source build.
# @ECLASS-VARIABLE: ECONF_SOURCE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Specify location of autotools' configure script. By default it uses ${S}.
# @ECLASS-VARIABLE: DOCS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array containing documents passed to dodoc command.
#
# In EAPIs 4+, can list directories as well.
#
# Example:
# @CODE
# DOCS=( NEWS README )
# @CODE
# @ECLASS-VARIABLE: HTML_DOCS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array containing documents passed to dohtml command.
#
# Example:
# @CODE
# HTML_DOCS=( doc/html/ )
# @CODE
# @ECLASS-VARIABLE: PATCHES
# @DEFAULT_UNSET
# @DESCRIPTION:
# PATCHES array variable containing all various patches to be applied.
#
# Example:
# @CODE
# PATCHES=( "${FILESDIR}"/${P}-mypatch.patch )
# @CODE
# @ECLASS-VARIABLE: AUTOTOOLS_PRUNE_LIBTOOL_FILES
# @DEFAULT_UNSET
# @DESCRIPTION:
# Sets the mode of pruning libtool files. The values correspond to
# prune_libtool_files parameters, with leading dashes stripped.
#
# Defaults to pruning the libtool files when static libraries are not
# installed or can be linked properly without them. Libtool files
# for modules (plugins) will be kept in case plugin loader needs them.
#
# If set to 'modules', the .la files for modules will be removed
# as well. This is often the preferred option.
#
# If set to 'all', all .la files will be removed unconditionally. This
# option is discouraged and shall be used only if 'modules' does not
# remove the files.
#
# If set to 'none', no .la files will be pruned ever. Use in corner
# cases only.
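# Example (using one of the values listed above):
# @CODE
# AUTOTOOLS_PRUNE_LIBTOOL_FILES=modules
# @CODE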
# Determine whether to use an in-source or out-of-source build
_check_build_dir() {
: ${ECONF_SOURCE:=${S}}
# Respect both the old variable and the new one, depending
# on which one was set by the ebuild.
if [[ ! ${BUILD_DIR} && ${AUTOTOOLS_BUILD_DIR} ]]; then
eqawarn "The AUTOTOOLS_BUILD_DIR variable has been renamed to BUILD_DIR."
eqawarn "Please migrate the ebuild to use the new one."
# In the next call, both variables will be set already
# and we'd have to know which one takes precedence.
_RESPECT_AUTOTOOLS_BUILD_DIR=1
fi
if [[ ${_RESPECT_AUTOTOOLS_BUILD_DIR} ]]; then
BUILD_DIR=${AUTOTOOLS_BUILD_DIR:-${WORKDIR}/${P}_build}
else
if [[ -n ${AUTOTOOLS_IN_SOURCE_BUILD} ]]; then
: ${BUILD_DIR:=${ECONF_SOURCE}}
else
: ${BUILD_DIR:=${WORKDIR}/${P}_build}
fi
fi
# Backwards compatibility for getting the value.
AUTOTOOLS_BUILD_DIR=${BUILD_DIR}
echo ">>> Working in BUILD_DIR: \"${BUILD_DIR}\""
}
# @FUNCTION: autotools-utils_src_prepare
# @DESCRIPTION:
# The src_prepare function.
#
# Supporting PATCHES array and user patches. See base.eclass(5) for reference.
autotools-utils_src_prepare() {
debug-print-function ${FUNCNAME} "$@"
local want_autoreconf=${AUTOTOOLS_AUTORECONF}
[[ ${PATCHES} ]] && epatch "${PATCHES[@]}"
at_checksum() {
find '(' -name 'Makefile.am' \
-o -name 'configure.ac' \
-o -name 'configure.in' ')' \
-exec cksum {} + | sort -k2
}
[[ ! ${want_autoreconf} ]] && local checksum=$(at_checksum)
epatch_user
if [[ ! ${want_autoreconf} ]]; then
if [[ ${checksum} != $(at_checksum) ]]; then
einfo 'Will autoreconfigure due to user patches applied.'
want_autoreconf=yep
fi
fi
[[ ${want_autoreconf} ]] && eautoreconf
elibtoolize --patch-only
}
# @FUNCTION: autotools-utils_src_configure
# @DESCRIPTION:
# The src_configure function. For an out-of-source build it creates the
# build directory and runs econf there. Configuration parameters defined
# in myeconfargs are passed to econf. Additionally, the following USE
# flags are handled:
#
# IUSE="static-libs" passes --enable-shared and either --enable-static
# or --disable-static to econf, depending on the flag's state.
# @VARIABLE: myeconfargs
# @DEFAULT_UNSET
# @DESCRIPTION:
# Optional econf arguments as Bash array. Should be defined before calling src_configure.
# @CODE
# src_configure() {
# local myeconfargs=(
# --disable-readline
# --with-confdir="/etc/nasty foo confdir/"
# $(use_enable debug cnddebug)
# $(use_enable threads multithreading)
# )
# autotools-utils_src_configure
# }
# @CODE
autotools-utils_src_configure() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${myeconfargs+1} || $(declare -p myeconfargs) == 'declare -a'* ]] \
|| die 'autotools-utils.eclass: myeconfargs has to be an array.'
# Common args
local econfargs=()
_check_build_dir
if "${ECONF_SOURCE}"/configure --help 2>&1 | grep -q '^ *--docdir='; then
econfargs+=(
--docdir="${EPREFIX}"/usr/share/doc/${PF}
)
fi
# Handle static-libs found in IUSE, disable them by default
if in_iuse static-libs; then
econfargs+=(
--enable-shared
$(use_enable static-libs static)
)
fi
# Append user args
econfargs+=("${myeconfargs[@]}")
mkdir -p "${BUILD_DIR}" || die
pushd "${BUILD_DIR}" > /dev/null || die
econf "${econfargs[@]}" "$@"
popd > /dev/null || die
}
# @FUNCTION: autotools-utils_src_compile
# @DESCRIPTION:
# The autotools src_compile function; invokes emake in the specified BUILD_DIR.
autotools-utils_src_compile() {
debug-print-function ${FUNCNAME} "$@"
_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
emake "$@" || die 'emake failed'
popd > /dev/null || die
}
# @FUNCTION: autotools-utils_src_install
# @DESCRIPTION:
# The autotools src_install function. Runs emake install, unconditionally
# removes unnecessary static libs (based on the shouldnotlink libtool property)
# and removes unnecessary libtool files when the static-libs USE flag is
# present in IUSE and disabled.
#
# DOCS and HTML_DOCS arrays are supported. See base.eclass(5) for reference.
autotools-utils_src_install() {
debug-print-function ${FUNCNAME} "$@"
_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
emake DESTDIR="${D}" "$@" install || die "emake install failed"
popd > /dev/null || die
# XXX: support installing them from builddir as well?
if declare -p DOCS &>/dev/null; then
# an empty list == don't install anything
if [[ ${DOCS[@]} ]]; then
# dies by itself
dodoc -r "${DOCS[@]}"
fi
else
local f
# same list as in PMS
for f in README* ChangeLog AUTHORS NEWS TODO CHANGES \
THANKS BUGS FAQ CREDITS CHANGELOG; do
if [[ -s ${f} ]]; then
dodoc "${f}" || die "(default) dodoc ${f} failed"
fi
done
fi
if [[ ${HTML_DOCS} ]]; then
dohtml -r "${HTML_DOCS[@]}" || die "dohtml failed"
fi
# Remove libtool files and unnecessary static libs
local prune_ltfiles=${AUTOTOOLS_PRUNE_LIBTOOL_FILES}
if [[ ${prune_ltfiles} != none ]]; then
prune_libtool_files ${prune_ltfiles:+--${prune_ltfiles}}
fi
}
# @FUNCTION: autotools-utils_src_test
# @DESCRIPTION:
# The autotools src_test function. Runs emake check in build directory.
autotools-utils_src_test() {
debug-print-function ${FUNCNAME} "$@"
_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
if make -ni check "${@}" &>/dev/null; then
emake check "${@}" || die 'emake check failed.'
elif make -ni test "${@}" &>/dev/null; then
emake test "${@}" || die 'emake test failed.'
fi
popd > /dev/null || die
}

652
eclass/autotools.eclass Normal file
View File

@ -0,0 +1,652 @@
# Copyright 1999-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: autotools.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6 7
# @BLURB: Regenerates auto* build scripts
# @DESCRIPTION:
# This eclass is for safely handling autotooled software packages that need to
# regenerate their build scripts. All functions will abort in case of errors.
# Note: We require GNU m4, as does autoconf. So feel free to use any features
# from the GNU version of m4 without worrying about other variants (e.g. BSD).
if [[ ${__AUTOTOOLS_AUTO_DEPEND+set} == "set" ]] ; then
# See if we were included already, but someone changed the value
# of AUTOTOOLS_AUTO_DEPEND on us. We could reload the entire
# eclass at that point, but that adds overhead, and it's trivial
# to re-order inherit in eclasses/ebuilds instead. #409611
if [[ ${__AUTOTOOLS_AUTO_DEPEND} != ${AUTOTOOLS_AUTO_DEPEND} ]] ; then
die "AUTOTOOLS_AUTO_DEPEND changed value between inherits; please inherit autotools.eclass first! ${__AUTOTOOLS_AUTO_DEPEND} -> ${AUTOTOOLS_AUTO_DEPEND}"
fi
fi
if [[ -z ${_AUTOTOOLS_ECLASS} ]]; then
_AUTOTOOLS_ECLASS=1
case ${EAPI:-0} in
0|1|2|3|4|5|6|7) ;;
*) die "${ECLASS}: EAPI ${EAPI} not supported" ;;
esac
inherit libtool
# @ECLASS-VARIABLE: WANT_AUTOCONF
# @DESCRIPTION:
# The major version of autoconf your package needs
: ${WANT_AUTOCONF:=latest}
# @ECLASS-VARIABLE: WANT_AUTOMAKE
# @DESCRIPTION:
# The major version of automake your package needs
: ${WANT_AUTOMAKE:=latest}
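# Example (a hedged sketch; the version value is illustrative):
# @CODE
# WANT_AUTOMAKE="1.16"
# inherit autotools
# @CODE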
# @ECLASS-VARIABLE: WANT_LIBTOOL
# @DESCRIPTION:
# Do you want libtool? Valid values here are "latest" and "none".
: ${WANT_LIBTOOL:=latest}
# @ECLASS-VARIABLE: _LATEST_AUTOMAKE
# @INTERNAL
# @DESCRIPTION:
# CONSTANT!
# The latest major unstable and stable version/slot of automake available
# on each arch.
# Only add unstable version if it is in a different slot than latest stable
# version.
# List latest unstable version first to boost testing adoption rate because
# most package managers' dependency resolvers will pick the first suitable
# version.
# If a newer slot is stable on any arch, and is NOT reflected in this list,
# then circular dependencies may arise during emerge @system bootstraps.
#
# See bug 312315 and 465732 for further information and context.
#
# Do NOT change this variable in your ebuilds!
# If you want to force a newer minor version, you can specify the correct
# WANT value by using a colon: <PV>:<WANT_AUTOMAKE>
_LATEST_AUTOMAKE=( 1.16.2-r1:1.16 )
_automake_atom="sys-devel/automake"
_autoconf_atom="sys-devel/autoconf"
if [[ -n ${WANT_AUTOMAKE} ]]; then
case ${WANT_AUTOMAKE} in
# Even if the package doesn't use automake, we still need to depend
# on it because we run aclocal to process m4 macros. This matches
# the autoreconf tool, so this requirement is correct. #401605
none) ;;
latest)
# Use SLOT deps if we can. For EAPI=0, we get pretty close.
if [[ ${EAPI:-0} != 0 ]] ; then
_automake_atom="|| ( `printf '>=sys-devel/automake-%s:%s ' ${_LATEST_AUTOMAKE[@]/:/ }` )"
else
_automake_atom="|| ( `printf '>=sys-devel/automake-%s ' ${_LATEST_AUTOMAKE[@]/%:*}` )"
fi
;;
*) _automake_atom="=sys-devel/automake-${WANT_AUTOMAKE}*" ;;
esac
export WANT_AUTOMAKE
fi
if [[ -n ${WANT_AUTOCONF} ]] ; then
case ${WANT_AUTOCONF} in
none) _autoconf_atom="" ;; # some packages don't require autoconf at all
2.1) _autoconf_atom="~sys-devel/autoconf-2.13" ;;
# if you change the "latest" version here, change also autotools_env_setup
latest|2.5) _autoconf_atom=">=sys-devel/autoconf-2.69" ;;
*) die "Invalid WANT_AUTOCONF value '${WANT_AUTOCONF}'" ;;
esac
export WANT_AUTOCONF
fi
_libtool_atom=">=sys-devel/libtool-2.4"
if [[ -n ${WANT_LIBTOOL} ]] ; then
case ${WANT_LIBTOOL} in
none) _libtool_atom="" ;;
latest) ;;
*) die "Invalid WANT_LIBTOOL value '${WANT_LIBTOOL}'" ;;
esac
export WANT_LIBTOOL
fi
AUTOTOOLS_DEPEND="${_automake_atom}
${_autoconf_atom}
${_libtool_atom}"
RDEPEND=""
# @ECLASS-VARIABLE: AUTOTOOLS_AUTO_DEPEND
# @DESCRIPTION:
# Set to 'no' to disable automatically adding to DEPEND. This lets
# ebuilds form conditional depends by using ${AUTOTOOLS_DEPEND} in
# their own DEPEND string.
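# Example (a hedged sketch of a conditional dependency; the USE flag is
# illustrative):
# @CODE
# AUTOTOOLS_AUTO_DEPEND=no
# inherit autotools
#
# DEPEND="doc? ( ${AUTOTOOLS_DEPEND} )"
# @CODE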
: ${AUTOTOOLS_AUTO_DEPEND:=yes}
if [[ ${AUTOTOOLS_AUTO_DEPEND} != "no" ]] ; then
case ${EAPI:-0} in
0|1|2|3|4|5|6) DEPEND=${AUTOTOOLS_DEPEND} ;;
7) BDEPEND=${AUTOTOOLS_DEPEND} ;;
esac
fi
__AUTOTOOLS_AUTO_DEPEND=${AUTOTOOLS_AUTO_DEPEND} # See top of eclass
unset _automake_atom _autoconf_atom
# @ECLASS-VARIABLE: AM_OPTS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Additional options to pass to automake during
# eautoreconf call.
# @ECLASS-VARIABLE: AT_NOEAUTOHEADER
# @DEFAULT_UNSET
# @DESCRIPTION:
# Don't run eautoheader command if set to 'yes'; only used to work around
# packages that don't want their headers being modified.
# @ECLASS-VARIABLE: AT_NOEAUTOMAKE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Don't run eautomake command if set to 'yes'; only used to work around
# broken packages. Generally you should, instead, fix the package to
# not call AM_INIT_AUTOMAKE if it doesn't actually use automake.
# @ECLASS-VARIABLE: AT_NOELIBTOOLIZE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Don't run elibtoolize command if set to 'yes',
# useful when elibtoolize needs to be run with
# particular options
# @ECLASS-VARIABLE: AT_M4DIR
# @DESCRIPTION:
# Additional director(y|ies) aclocal should search
: ${AT_M4DIR:=}
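# Example (a hedged sketch; the directory name is illustrative):
# @CODE
# AT_M4DIR="m4" eautoreconf
# @CODE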
# @ECLASS-VARIABLE: AT_SYS_M4DIR
# @INTERNAL
# @DESCRIPTION:
# For system integrators, a list of additional aclocal search paths.
# This variable gets eval-ed, so you can use variables in the definition
# that may not be valid until eautoreconf & friends are run.
: ${AT_SYS_M4DIR:=}
# @FUNCTION: eautoreconf
# @DESCRIPTION:
# This function mimics the behavior of autoreconf, but uses the various
# eauto* functions to run the tools. It doesn't accept parameters, but
# the directory with include files can be specified with the AT_M4DIR variable.
#
# Should do a full autoreconf - normally what most people will be interested in.
# Also should handle additional directories specified by AC_CONFIG_SUBDIRS.
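# Typical usage from src_prepare (a hedged sketch for EAPI 6 and newer):
# @CODE
# src_prepare() {
#     default
#     eautoreconf
# }
# @CODE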
eautoreconf() {
local x g
# Subdirs often share a common build dir #529404. If so, we can't safely
# run in parallel because many tools clobber the content in there. Libtool
# and automake both `rm && cp` while aclocal reads the output. We might be
# able to handle this if we split the steps and grab locks on the dirs the
# tools actually write to. Then we'd run all the common tools that use
# those inputs. Doing this in bash does not scale easily.
# If we do re-enable parallel support, make sure #426512 is handled.
if [[ -z ${AT_NO_RECURSIVE} ]] ; then
# Take care of subdirs
for x in $(autotools_check_macro_val AC_CONFIG_SUBDIRS) ; do
if [[ -d ${x} ]] ; then
pushd "${x}" >/dev/null
# Avoid unsafe nested multijob_finish_one for bug #426512.
AT_NOELIBTOOLIZE="yes" eautoreconf || die
popd >/dev/null
fi
done
fi
einfo "Running eautoreconf in '${PWD}' ..."
local m4dirs=$(autotools_check_macro_val AC_CONFIG_{AUX,MACRO}_DIR)
[[ -n ${m4dirs} ]] && mkdir -p ${m4dirs}
# Run all the tools before aclocal so we can gather the .m4 files.
local i tools=(
# <tool> <was run> <command>
glibgettext false "autotools_run_tool glib-gettextize --copy --force"
gettext false "autotools_run_tool --at-missing autopoint --force"
# intltool must come after autopoint.
intltool false "autotools_run_tool intltoolize --automake --copy --force"
gtkdoc false "autotools_run_tool --at-missing gtkdocize --copy"
gnomedoc false "autotools_run_tool --at-missing gnome-doc-prepare --copy --force"
libtool false "_elibtoolize --auto-ltdl --install --copy --force"
)
for (( i = 0; i < ${#tools[@]}; i += 3 )) ; do
if _at_uses_${tools[i]} ; then
tools[i+1]=true
${tools[i+2]}
fi
done
# Generate aclocal.m4 with our up-to-date m4 files.
local rerun_aclocal=false
eaclocal
# Check to see if we had macros expanded by other macros or in other
# m4 files that we couldn't detect early. This is uncommon, but some
# packages do this, so we have to handle it correctly.
for (( i = 0; i < ${#tools[@]}; i += 3 )) ; do
if ! ${tools[i+1]} && _at_uses_${tools[i]} ; then
${tools[i+2]}
rerun_aclocal=true
fi
done
${rerun_aclocal} && eaclocal
if [[ ${WANT_AUTOCONF} = 2.1 ]] ; then
eautoconf
else
eautoconf --force
fi
[[ ${AT_NOEAUTOHEADER} != "yes" ]] && eautoheader
[[ ${AT_NOEAUTOMAKE} != "yes" ]] && FROM_EAUTORECONF="yes" eautomake ${AM_OPTS}
if [[ ${AT_NOELIBTOOLIZE} != "yes" ]] ; then
# Call it here to prevent failures due to elibtoolize called _before_
# eautoreconf.
elibtoolize --force "${PWD}"
fi
return 0
}
# @FUNCTION: _at_uses_pkg
# @USAGE: <macros>
# @INTERNAL
# See if the specified macros are enabled.
_at_uses_pkg() {
if [[ -n $(autotools_check_macro "$@") ]] ; then
return 0
else
# If the trace didn't find it (perhaps because aclocal.m4 hasn't
# been generated yet), cheat, but be conservative.
local macro args=()
for macro ; do
args+=( -e "^[[:space:]]*${macro}\>" )
done
egrep -q "${args[@]}" configure.??
fi
}
_at_uses_autoheader() { _at_uses_pkg A{C,M}_CONFIG_HEADER{S,}; }
_at_uses_automake() { _at_uses_pkg AM_INIT_AUTOMAKE; }
_at_uses_gettext() { _at_uses_pkg AM_GNU_GETTEXT_{,REQUIRE_}VERSION; }
_at_uses_glibgettext() { _at_uses_pkg AM_GLIB_GNU_GETTEXT; }
_at_uses_intltool() { _at_uses_pkg {AC,IT}_PROG_INTLTOOL; }
_at_uses_gtkdoc() { _at_uses_pkg GTK_DOC_CHECK; }
_at_uses_gnomedoc() { _at_uses_pkg GNOME_DOC_INIT; }
_at_uses_libtool() { _at_uses_pkg A{C,M}_PROG_LIBTOOL LT_INIT; }
_at_uses_libltdl() { _at_uses_pkg LT_CONFIG_LTDL_DIR; }
# @FUNCTION: eaclocal_amflags
# @DESCRIPTION:
# Extract the ACLOCAL_AMFLAGS value from the Makefile.am and try to handle
# most of the crazy crap that people throw at us.
eaclocal_amflags() {
local aclocal_opts amflags_file
for amflags_file in GNUmakefile.am Makefile.am GNUmakefile.in Makefile.in ; do
[[ -e ${amflags_file} ]] || continue
# setup the env in case the pkg does something crazy
# in their ACLOCAL_AMFLAGS. like run a shell script
# which turns around and runs autotools. #365401
# or split across multiple lines. #383525
autotools_env_setup
aclocal_opts=$(sed -n \
"/^ACLOCAL_AMFLAGS[[:space:]]*=/{ \
# match the first line
s:[^=]*=::p; \
# then gobble up all escaped lines
: nextline /\\\\$/{ n; p; b nextline; } \
}" ${amflags_file})
eval aclocal_opts=\""${aclocal_opts}"\"
break
done
echo ${aclocal_opts}
}
# @FUNCTION: eaclocal
# @DESCRIPTION:
# This function runs aclocal through autotools_run_tool with the specified
# parameters. The name of the tool run is the same as the function name
# without the 'e' prefix.
# It also forces installing the support files for safety.
# Respects AT_M4DIR for additional directories to search for macros.
eaclocal() {
[[ ! -f aclocal.m4 || -n $(grep -e 'generated.*by aclocal' aclocal.m4) ]] && \
autotools_run_tool --at-m4flags aclocal "$@" $(eaclocal_amflags)
}
# @FUNCTION: _elibtoolize
# @DESCRIPTION:
# Runs libtoolize.
#
# Note the '_' prefix: avoid collision with elibtoolize() from libtool.eclass.
_elibtoolize() {
local LIBTOOLIZE=${LIBTOOLIZE:-$(type -P glibtoolize > /dev/null && echo glibtoolize || echo libtoolize)}
if [[ $1 == "--auto-ltdl" ]] ; then
shift
_at_uses_libltdl && set -- "$@" --ltdl
fi
[[ -f GNUmakefile.am || -f Makefile.am ]] && set -- "$@" --automake
autotools_run_tool ${LIBTOOLIZE} "$@"
}
# @FUNCTION: eautoheader
# @DESCRIPTION:
# Runs autoheader.
eautoheader() {
_at_uses_autoheader || return 0
autotools_run_tool --at-no-fail --at-m4flags autoheader "$@"
}
# @FUNCTION: eautoconf
# @DESCRIPTION:
# Runs autoconf.
eautoconf() {
if [[ ! -f configure.ac && ! -f configure.in ]] ; then
echo
eerror "No configure.{ac,in} present in '${PWD}'!"
echo
die "No configure.{ac,in} present!"
fi
if [[ ${WANT_AUTOCONF} != "2.1" && -e configure.in ]] ; then
eqawarn "This package has a configure.in file which has long been deprecated. Please"
eqawarn "update it to use configure.ac instead as newer versions of autotools will die"
eqawarn "when it finds this file. See https://bugs.gentoo.org/426262 for details."
fi
# Install config.guess and config.sub which are required by many macros
# in Autoconf >=2.70.
local _gnuconfig
case ${EAPI:-0} in
0|1|2|3|4|5|6)
_gnuconfig="${EPREFIX}/usr/share/gnuconfig"
;;
*)
_gnuconfig="${BROOT}/usr/share/gnuconfig"
;;
esac
cp "${_gnuconfig}"/config.{guess,sub} . || die
autotools_run_tool --at-m4flags autoconf "$@"
}
# @FUNCTION: eautomake
# @DESCRIPTION:
# Runs automake.
eautomake() {
local extra_opts=()
local makefile_name
# Run automake if:
# - a Makefile.am type file exists
# - the configure script is using the AM_INIT_AUTOMAKE directive
for makefile_name in {GNUmakefile,{M,m}akefile}.am "" ; do
[[ -f ${makefile_name} ]] && break
done
_automake_version() {
autotools_run_tool --at-output automake --version 2>/dev/null |
sed -n -e '1{s:.*(GNU automake) ::p;q}'
}
if [[ -z ${makefile_name} ]] ; then
_at_uses_automake || return 0
elif [[ -z ${FROM_EAUTORECONF} && -f ${makefile_name%.am}.in ]]; then
local used_automake
local installed_automake
installed_automake=$(WANT_AUTOMAKE= _automake_version)
used_automake=$(head -n 1 < ${makefile_name%.am}.in | \
sed -e 's:.*by automake \(.*\) from .*:\1:')
if [[ ${installed_automake} != ${used_automake} ]]; then
ewarn "Automake used for the package (${used_automake}) differs from" \
"the installed version (${installed_automake})."
ewarn "Forcing a full rebuild of the autotools to workaround."
eautoreconf
return 0
fi
fi
[[ -f INSTALL && -f AUTHORS && -f ChangeLog && -f NEWS && -f README ]] \
|| extra_opts+=( --foreign )
# Older versions of automake do not support --force-missing. But we want
# to use this whenever possible to update random bundled files #133489.
case $(_automake_version) in
1.4|1.4[.-]*) ;;
*) extra_opts+=( --force-missing ) ;;
esac
autotools_run_tool automake --add-missing --copy "${extra_opts[@]}" "$@"
}
# @FUNCTION: eautopoint
# @DESCRIPTION:
# Runs autopoint (from the gettext package).
eautopoint() {
autotools_run_tool autopoint "$@"
}
# @FUNCTION: config_rpath_update
# @USAGE: [destination]
# @DESCRIPTION:
# Some packages utilize the config.rpath helper script, but don't
# use gettext directly. So we have to copy it in manually since
# we can't let `autopoint` do it for us.
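# Example (the destination path is hypothetical):
# @CODE
# config_rpath_update build-aux/config.rpath
# @CODE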
config_rpath_update() {
local dst src=$(type -P gettext | sed 's:bin/gettext:share/gettext/config.rpath:')
[[ $# -eq 0 ]] && set -- $(find -name config.rpath)
[[ $# -eq 0 ]] && return 0
einfo "Updating all config.rpath files"
for dst in "$@" ; do
einfo " ${dst}"
cp "${src}" "${dst}" || die
done
}
# @FUNCTION: autotools_env_setup
# @INTERNAL
# @DESCRIPTION:
# Process the WANT_AUTO{CONF,MAKE} flags.
autotools_env_setup() {
# We do the "latest" → version switch here because it solves
# possible order problems, see bug #270010 as an example.
if [[ ${WANT_AUTOMAKE} == "latest" ]]; then
local pv
for pv in ${_LATEST_AUTOMAKE[@]/#*:} ; do
# Break on first hit to respect _LATEST_AUTOMAKE order.
local hv_args=""
case ${EAPI:-0} in
5|6)
hv_args="--host-root"
;;
7)
hv_args="-b"
;;
esac
ROOT=/ has_version ${hv_args} "=sys-devel/automake-${pv}*" && export WANT_AUTOMAKE="${pv}" && break
done
[[ ${WANT_AUTOMAKE} == "latest" ]] && \
die "Cannot find the latest automake! Tried ${_LATEST_AUTOMAKE[*]}"
fi
[[ ${WANT_AUTOCONF} == "latest" ]] && export WANT_AUTOCONF=2.5
}
# @FUNCTION: autotools_run_tool
# @USAGE: [--at-no-fail] [--at-m4flags] [--at-missing] [--at-output] <autotool> [tool-specific flags]
# @INTERNAL
# @DESCRIPTION:
# Run the specified autotool helper, but do logging and error checking
# around it in the process.
autotools_run_tool() {
# Process our own internal flags first
local autofail=true m4flags=false missing_ok=false return_output=false
while [[ -n $1 ]] ; do
case $1 in
--at-no-fail) autofail=false;;
--at-m4flags) m4flags=true;;
--at-missing) missing_ok=true;;
--at-output) return_output=true;;
# whatever is left goes to the actual tool
*) break;;
esac
shift
done
if [[ ${EBUILD_PHASE} != "unpack" && ${EBUILD_PHASE} != "prepare" ]]; then
ewarn "QA Warning: running $1 in ${EBUILD_PHASE} phase"
fi
if ${missing_ok} && ! type -P ${1} >/dev/null ; then
einfo "Skipping '$*' due $1 not installed"
return 0
fi
autotools_env_setup
# Allow people to pass in full paths. #549268
local STDERR_TARGET="${T}/${1##*/}.out"
# most of the time, there will only be one run, but if there are
# more, make sure we get unique log filenames
if [[ -e ${STDERR_TARGET} ]] ; then
local i=1
while :; do
STDERR_TARGET="${T}/${1##*/}-${i}.out"
[[ -e ${STDERR_TARGET} ]] || break
: $(( i++ ))
done
fi
if ${m4flags} ; then
set -- "${1}" $(autotools_m4dir_include) $(autotools_m4sysdir_include) "${@:2}"
fi
# If the caller wants to probe something, then let them do it directly.
if ${return_output} ; then
"$@"
return
fi
printf "***** $1 *****\n***** PWD: ${PWD}\n***** $*\n\n" > "${STDERR_TARGET}"
ebegin "Running $@"
"$@" >> "${STDERR_TARGET}" 2>&1
if ! eend $? && ${autofail} ; then
echo
eerror "Failed Running $1 !"
eerror
eerror "Include in your bugreport the contents of:"
eerror
eerror " ${STDERR_TARGET}"
echo
die "Failed Running $1 !"
fi
}
# Internal function to check for support
# Keep a list of all the macros we might use so that we only
# have to run the trace code once. Order doesn't matter.
ALL_AUTOTOOLS_MACROS=(
A{C,M}_PROG_LIBTOOL LT_INIT LT_CONFIG_LTDL_DIR
A{C,M}_CONFIG_HEADER{S,}
AC_CONFIG_SUBDIRS
AC_CONFIG_AUX_DIR AC_CONFIG_MACRO_DIR
AM_INIT_AUTOMAKE
AM_GLIB_GNU_GETTEXT
AM_GNU_GETTEXT_{,REQUIRE_}VERSION
{AC,IT}_PROG_INTLTOOL
GTK_DOC_CHECK
GNOME_DOC_INIT
)
autotools_check_macro() {
[[ -f configure.ac || -f configure.in ]] || return 0
# We can run in multiple dirs, so we have to cache the trace
# data in $PWD rather than an env var.
local trace_file=".__autoconf_trace_data"
if [[ ! -e ${trace_file} ]] || [[ ! aclocal.m4 -ot ${trace_file} ]] ; then
WANT_AUTOCONF="2.5" autoconf \
$(autotools_m4dir_include) \
${ALL_AUTOTOOLS_MACROS[@]/#/--trace=} > ${trace_file} 2>/dev/null
fi
local macro args=()
for macro ; do
has ${macro} ${ALL_AUTOTOOLS_MACROS[@]} || die "internal error: add ${macro} to ALL_AUTOTOOLS_MACROS"
args+=( -e ":${macro}:" )
done
grep "${args[@]}" ${trace_file}
}
# @FUNCTION: autotools_check_macro_val
# @USAGE: <macro> [macros]
# @INTERNAL
# @DESCRIPTION:
# Look for a macro and extract its value.
autotools_check_macro_val() {
local macro scan_out
for macro ; do
autotools_check_macro "${macro}" | \
gawk -v macro="${macro}" \
'($0 !~ /^[[:space:]]*(#|dnl)/) {
if (match($0, macro ":(.*)$", res))
print res[1]
}' | uniq
done
return 0
}
_autotools_m4dir_include() {
local x include_opts flag
# Use the right flag to autoconf based on the version #448986
[[ ${WANT_AUTOCONF} == "2.1" ]] \
&& flag="l" \
|| flag="I"
for x in "$@" ; do
case ${x} in
# We handle it below
-${flag}) ;;
*)
[[ ! -d ${x} ]] && ewarn "autotools.eclass: '${x}' does not exist"
include_opts+=" -${flag} ${x}"
;;
esac
done
echo ${include_opts}
}
autotools_m4dir_include() { _autotools_m4dir_include ${AT_M4DIR} ; }
autotools_m4sysdir_include() {
# First try to use the paths the system integrator has set up.
local paths=( $(eval echo ${AT_SYS_M4DIR}) )
if [[ ${#paths[@]} -eq 0 && -n ${SYSROOT} ]] ; then
# If they didn't give us anything, then default to the SYSROOT.
# This helps when cross-compiling.
local path="${SYSROOT}/usr/share/aclocal"
[[ -d ${path} ]] && paths+=( "${path}" )
fi
_autotools_m4dir_include "${paths[@]}"
}
fi

215
eclass/base.eclass Normal file
View File

@ -0,0 +1,215 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# DEPRECATED
# This eclass has been deprecated and must not be used by any new
# ebuilds or eclasses. Replacements for particular phase functions
# in EAPI 2+:
#
# base_src_unpack() - default (or unpacker_src_unpack if unpacker.eclass
# was inherited)
# base_src_prepare() - inherit eutils, inline:
# epatch "${PATCHES[@]}" # if PATCHES defined as array
# epatch ${PATCHES} # if PATCHES defined as string
# epatch_user
# base_src_configure() - default
# base_src_compile() - default
# base_src_install() - default
# base_src_install_docs() - einstalldocs from eutils.eclass
# @ECLASS: base.eclass
# @MAINTAINER:
# QA Team <qa@gentoo.org>
# @AUTHOR:
# Original author: Dan Armak <danarmak@gentoo.org>
# @SUPPORTED_EAPIS: 0 1 2 3 4 5
# @BLURB: The base eclass defines some default functions and variables.
# @DEPRECATED: none
# @DESCRIPTION:
# The base eclass defines some default functions and variables.
if [[ -z ${_BASE_ECLASS} ]]; then
_BASE_ECLASS=1
inherit eutils
BASE_EXPF="src_unpack src_compile src_install"
case "${EAPI:-0}" in
0|1) ;;
2|3|4|5) BASE_EXPF+=" src_prepare src_configure" ;;
*) die "${ECLASS}.eclass is banned in EAPI ${EAPI}";;
esac
EXPORT_FUNCTIONS ${BASE_EXPF}
# @ECLASS-VARIABLE: DOCS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array containing documents passed to dodoc command.
#
# DOCS=( "${S}/doc/document.txt" "${S}/doc/doc_folder/" )
# @ECLASS-VARIABLE: HTML_DOCS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array containing documents passed to dohtml command.
#
# HTML_DOCS=( "${S}/doc/document.html" "${S}/doc/html_folder/" )
# @ECLASS-VARIABLE: PATCHES
# @DEFAULT_UNSET
# @DESCRIPTION:
# PATCHES array variable containing all various patches to be applied.
# This variable is expected to be defined in the global scope of the ebuild.
# Make sure to specify the full path. This variable is utilised in the
# src_unpack/src_prepare phase based on EAPI.
#
# NOTE: if using patches folders with special file suffixes you have to
# define one additional variable EPATCH_SUFFIX="something"
#
# PATCHES=( "${FILESDIR}/mypatch.patch" "${FILESDIR}/patches_folder/" )
# @FUNCTION: base_src_unpack
# @DESCRIPTION:
# The base src_unpack function, which is exported.
# It also calls src_prepare when EAPI is older than 2.
base_src_unpack() {
debug-print-function $FUNCNAME "$@"
pushd "${WORKDIR}" > /dev/null
if [[ $(type -t unpacker_src_unpack) == "function" ]] ; then
unpacker_src_unpack
elif [[ -n ${A} ]] ; then
unpack ${A}
fi
has src_prepare ${BASE_EXPF} || base_src_prepare
popd > /dev/null
}
# @FUNCTION: base_src_prepare
# @DESCRIPTION:
# The base src_prepare function, which is exported when
# EAPI is greater than or equal to 2. Here the PATCHES array is evaluated.
base_src_prepare() {
debug-print-function $FUNCNAME "$@"
debug-print "$FUNCNAME: PATCHES=$PATCHES"
local patches_failed=0
pushd "${S}" > /dev/null
if [[ "$(declare -p PATCHES 2>/dev/null 2>&1)" == "declare -a"* ]]; then
for x in "${PATCHES[@]}"; do
debug-print "$FUNCNAME: applying patch from ${x}"
if [[ -d "${x}" ]]; then
# Use standardized names and locations with bulk patching
# Patch directory is ${WORKDIR}/patch
# See epatch() in eutils.eclass for more documentation
EPATCH_SUFFIX=${EPATCH_SUFFIX:=patch}
# in order to preserve normal EPATCH_SOURCE value that can
# be used other way than with base eclass store in local
# variable and restore later
oldval=${EPATCH_SOURCE}
EPATCH_SOURCE=${x}
EPATCH_FORCE=yes
epatch
EPATCH_SOURCE=${oldval}
elif [[ -f "${x}" ]]; then
epatch "${x}"
else
ewarn "QA: File or directory \"${x}\" does not exist."
ewarn "QA: Check your PATCHES array or add missing file/directory."
patches_failed=1
fi
done
[[ ${patches_failed} -eq 1 ]] && die "Some patches failed. See above messages."
else
for x in ${PATCHES}; do
debug-print "$FUNCNAME: patching from ${x}"
epatch "${x}"
done
fi
# Apply user patches
debug-print "$FUNCNAME: applying user patches"
epatch_user
popd > /dev/null
}
# @FUNCTION: base_src_configure
# @DESCRIPTION:
# The base src_configure function, which is exported when
# EAPI is greater than or equal to 2. Runs basic econf.
base_src_configure() {
debug-print-function $FUNCNAME "$@"
# there is no pushd into ${S}, so the location where configure runs can be overridden
[[ -x ${ECONF_SOURCE:-.}/configure ]] && econf "$@"
}
# @FUNCTION: base_src_compile
# @DESCRIPTION:
# The base src_compile function; calls src_configure when
# EAPI is older than 2.
base_src_compile() {
debug-print-function $FUNCNAME "$@"
has src_configure ${BASE_EXPF} || base_src_configure
base_src_make "$@"
}
# @FUNCTION: base_src_make
# @DESCRIPTION:
# Actual function that runs the emake command.
base_src_make() {
debug-print-function $FUNCNAME "$@"
if [[ -f Makefile || -f GNUmakefile || -f makefile ]]; then
emake "$@" || die "died running emake, $FUNCNAME"
fi
}
# @FUNCTION: base_src_install
# @DESCRIPTION:
# The base src_install function. Runs make install and
# installs the documents and HTML documents from the DOCS and HTML_DOCS
# arrays.
base_src_install() {
debug-print-function $FUNCNAME "$@"
emake DESTDIR="${D}" "$@" install || die "died running make install, $FUNCNAME"
base_src_install_docs
}
# @FUNCTION: base_src_install_docs
# @DESCRIPTION:
# Actual function that installs documentation from the
# DOCS and HTML_DOCS arrays.
base_src_install_docs() {
debug-print-function $FUNCNAME "$@"
local x
pushd "${S}" > /dev/null
if [[ "$(declare -p DOCS 2>/dev/null 2>&1)" == "declare -a"* ]]; then
for x in "${DOCS[@]}"; do
debug-print "$FUNCNAME: docs: creating document from ${x}"
dodoc "${x}" || die "dodoc failed"
done
fi
if [[ "$(declare -p HTML_DOCS 2>/dev/null 2>&1)" == "declare -a"* ]]; then
for x in "${HTML_DOCS[@]}"; do
debug-print "$FUNCNAME: docs: creating html document from ${x}"
dohtml -r "${x}" || die "dohtml failed"
done
fi
popd > /dev/null
}
fi

138
eclass/bash-completion-r1.eclass Normal file
View File

@ -0,0 +1,138 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: bash-completion-r1.eclass
# @MAINTAINER:
# mgorny@gentoo.org
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6 7
# @BLURB: A few quick functions to install bash-completion files
# @EXAMPLE:
#
# @CODE
# EAPI=5
#
# src_configure() {
# econf \
# --with-bash-completion-dir="$(get_bashcompdir)"
# }
#
# src_install() {
# default
#
# newbashcomp contrib/${PN}.bash-completion ${PN}
# }
# @CODE
inherit toolchain-funcs
case ${EAPI:-0} in
0|1|2|3|4|5|6|7) ;;
*) die "EAPI ${EAPI} unsupported (yet)."
esac
# @FUNCTION: _bash-completion-r1_get_bashdir
# @INTERNAL
# @DESCRIPTION:
# First argument is the name of the variable in bash-completion.pc.
# Second argument is the fallback directory if the variable is not found.
# @EXAMPLE:
# _bash-completion-r1_get_bashdir completionsdir /usr/share/bash-completion
_bash-completion-r1_get_bashdir() {
debug-print-function ${FUNCNAME} "${@}"
if $(tc-getPKG_CONFIG) --exists bash-completion &>/dev/null; then
local path
path=$($(tc-getPKG_CONFIG) --variable="${1}" bash-completion) || die
# we need to return an unprefixed path, so strip ${EPREFIX} from what
# pkg-config returns to us, bug #477692
echo "${path#${EPREFIX}}"
else
echo "${2}"
fi
}
# @FUNCTION: _bash-completion-r1_get_bashcompdir
# @INTERNAL
# @DESCRIPTION:
# Get unprefixed bash-completion completions directory.
_bash-completion-r1_get_bashcompdir() {
debug-print-function ${FUNCNAME} "${@}"
_bash-completion-r1_get_bashdir completionsdir /usr/share/bash-completion/completions
}
# @FUNCTION: _bash-completion-r1_get_bashhelpersdir
# @INTERNAL
# @DESCRIPTION:
# Get unprefixed bash-completion helpers directory.
_bash-completion-r1_get_bashhelpersdir() {
debug-print-function ${FUNCNAME} "${@}"
_bash-completion-r1_get_bashdir helpersdir /usr/share/bash-completion/helpers
}
# @FUNCTION: get_bashcompdir
# @DESCRIPTION:
# Get the bash-completion completions directory.
get_bashcompdir() {
debug-print-function ${FUNCNAME} "${@}"
echo "${EPREFIX}$(_bash-completion-r1_get_bashcompdir)"
}
# @FUNCTION: get_bashhelpersdir
# @INTERNAL
# @DESCRIPTION:
# Get the bash-completion helpers directory.
get_bashhelpersdir() {
debug-print-function ${FUNCNAME} "${@}"
echo "${EPREFIX}$(_bash-completion-r1_get_bashhelpersdir)"
}
# @FUNCTION: dobashcomp
# @USAGE: <file> [...]
# @DESCRIPTION:
# Install bash-completion files passed as args. Has EAPI-dependent failure
# behavior (like doins).
dobashcomp() {
debug-print-function ${FUNCNAME} "${@}"
(
insopts -m 0644
insinto "$(_bash-completion-r1_get_bashcompdir)"
doins "${@}"
)
}
# @FUNCTION: newbashcomp
# @USAGE: <file> <newname>
# @DESCRIPTION:
# Install bash-completion file under a new name. Has EAPI-dependent failure
# behavior (like newins).
newbashcomp() {
debug-print-function ${FUNCNAME} "${@}"
(
insopts -m 0644
insinto "$(_bash-completion-r1_get_bashcompdir)"
newins "${@}"
)
}
# @FUNCTION: bashcomp_alias
# @USAGE: <basename> <alias>...
# @DESCRIPTION:
# Alias <basename> completion to one or more commands (<alias>es).
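# Example (a hedged sketch; the command names are illustrative):
# @CODE
# bashcomp_alias gpg gpg2
# @CODE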
bashcomp_alias() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -lt 2 ]] && die "Usage: ${FUNCNAME} <basename> <alias>..."
local base=${1} f
shift
for f; do
dosym "${base}" "$(_bash-completion-r1_get_bashcompdir)/${f}" \
|| return
done
}
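# Illustrative usage sketch (the file and command names below are
# hypothetical): install a completion file under its command name and
# alias it for a second command that shares the same completions.
#
# @CODE
# src_install() {
# default
#
# newbashcomp contrib/foo.bash-completion foo
# bashcomp_alias foo foo-cli
# }
# @CODE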

222
eclass/bazel.eclass Normal file
View File

@ -0,0 +1,222 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: bazel.eclass
# @MAINTAINER:
# Jason Zaman <perfinion@gentoo.org>
# @AUTHOR:
# Jason Zaman <perfinion@gentoo.org>
# @SUPPORTED_EAPIS: 7
# @BLURB: Utility functions for packages using Bazel Build
# @DESCRIPTION:
# A utility eclass providing functions to run the Bazel Build system.
#
# This eclass does not export any phase functions.
case "${EAPI:-0}" in
0|1|2|3|4|5|6)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
if [[ ! ${_BAZEL_ECLASS} ]]; then
inherit multiprocessing toolchain-funcs
BDEPEND=">=dev-util/bazel-0.20"
# @FUNCTION: bazel_get_flags
# @DESCRIPTION:
# Obtain and print the bazel flags for target and host *FLAGS.
#
# To add more flags to this, append the flags to the
# appropriate variable before calling this function
bazel_get_flags() {
local i fs=()
for i in ${CFLAGS}; do
fs+=( "--conlyopt=${i}" )
done
for i in ${BUILD_CFLAGS}; do
fs+=( "--host_conlyopt=${i}" )
done
for i in ${CXXFLAGS}; do
fs+=( "--cxxopt=${i}" )
done
for i in ${BUILD_CXXFLAGS}; do
fs+=( "--host_cxxopt=${i}" )
done
for i in ${CPPFLAGS}; do
fs+=( "--conlyopt=${i}" "--cxxopt=${i}" )
done
for i in ${BUILD_CPPFLAGS}; do
fs+=( "--host_conlyopt=${i}" "--host_cxxopt=${i}" )
done
for i in ${LDFLAGS}; do
fs+=( "--linkopt=${i}" )
done
for i in ${BUILD_LDFLAGS}; do
fs+=( "--host_linkopt=${i}" )
done
echo "${fs[*]}"
}
# @FUNCTION: bazel_setup_bazelrc
# @DESCRIPTION:
# Creates the bazelrc with common options that will be passed
# to bazel. This will be called by ebazel automatically so
# does not need to be called from the ebuild.
bazel_setup_bazelrc() {
if [[ -f "${T}/bazelrc" ]]; then
return
fi
# F: fopen_wr
# P: /proc/self/setgroups
# Even with standalone enabled, the Bazel sandbox binary is run for feature test:
# https://github.com/bazelbuild/bazel/blob/7b091c1397a82258e26ab5336df6c8dae1d97384/src/main/java/com/google/devtools/build/lib/sandbox/LinuxSandboxedSpawnRunner.java#L61
# https://github.com/bazelbuild/bazel/blob/76555482873ffcf1d32fb40106f89231b37f850a/src/main/tools/linux-sandbox-pid1.cc#L113
addpredict /proc
mkdir -p "${T}/bazel-cache" || die
mkdir -p "${T}/bazel-distdir" || die
cat > "${T}/bazelrc" <<-EOF || die
startup --batch
# don't strip HOME, portage sets a temp per-package dir
build --action_env HOME
# make bazel respect MAKEOPTS
build --jobs=$(makeopts_jobs)
build --compilation_mode=opt --host_compilation_mode=opt
# FLAGS
build $(bazel_get_flags)
# Use standalone strategy to deactivate the bazel sandbox, since it
# conflicts with FEATURES=sandbox.
build --spawn_strategy=standalone --genrule_strategy=standalone
test --spawn_strategy=standalone --genrule_strategy=standalone
build --strip=never
build --verbose_failures --noshow_loading_progress
test --verbose_test_summary --verbose_failures --noshow_loading_progress
# make bazel only fetch distfiles from the cache
fetch --repository_cache="${T}/bazel-cache/" --distdir="${T}/bazel-distdir/"
build --repository_cache="${T}/bazel-cache/" --distdir="${T}/bazel-distdir/"
build --define=PREFIX=${EPREFIX%/}/usr
build --define=LIBDIR=\$(PREFIX)/$(get_libdir)
build --define=INCLUDEDIR=\$(PREFIX)/include
EOF
if tc-is-cross-compiler; then
echo "build --nodistinct_host_configuration" >> "${T}/bazelrc" || die
fi
}
# @FUNCTION: ebazel
# @USAGE: [<args>...]
# @DESCRIPTION:
# Run bazel with the bazelrc and output_base.
#
# output_base will be specific to $BUILD_DIR (if unset, $S).
# bazel_setup_bazelrc will be called and the created bazelrc
# will be passed to bazel.
#
# Will automatically die if bazel does not exit cleanly.
ebazel() {
bazel_setup_bazelrc
# Use different build folders for each multibuild variant.
local output_base="${BUILD_DIR:-${S}}"
output_base="${output_base%/}-bazel-base"
mkdir -p "${output_base}" || die
set -- bazel --bazelrc="${T}/bazelrc" --output_base="${output_base}" ${@}
echo "${*}" >&2
"${@}" || die "ebazel failed"
}
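# Illustrative usage sketch (the bazel target //foo:bar is hypothetical):
#
# @CODE
# src_compile() {
# ebazel build //foo:bar
# ebazel shutdown
# }
# @CODE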
# @FUNCTION: bazel_load_distfiles
# @USAGE: <distfiles>...
# @DESCRIPTION:
# Populate the bazel distdir to fetch from, since bazel cannot use
# the network. Bazel looks in distdir but will only look for the
# original filename, not the possibly renamed one that portage
# downloaded. If the line has '->' we need to rename the file back.
# This also handles the use-conditional syntax that SRC_URI supports.
#
# Example:
# @CODE
# bazel_external_uris="http://a/file-2.0.tgz
# python? ( http://b/1.0.tgz -> foo-1.0.tgz )"
# SRC_URI="http://c/${PV}.tgz
# ${bazel_external_uris}"
#
# src_unpack() {
# unpack ${PV}.tgz
# bazel_load_distfiles "${bazel_external_uris}"
# }
# @CODE
bazel_load_distfiles() {
local file=""
local rename=0
[[ "${@}" ]] || die "Missing args"
mkdir -p "${T}/bazel-distdir" || die
for word in ${@}
do
if [[ "${word}" == "->" ]]; then
# next word is a dest filename
rename=1
elif [[ "${word}" == ")" ]]; then
# close conditional block
continue
elif [[ "${word}" == "(" ]]; then
# open conditional block
continue
elif [[ "${word}" == ?(\!)[A-Za-z0-9]*([A-Za-z0-9+_@-])\? ]]; then
# use-conditional block
# USE-flags can contain [A-Za-z0-9+_@-], and start with alphanum
# https://dev.gentoo.org/~ulm/pms/head/pms.html#x1-200003.1.4
# ?(\!) matches zero-or-one !'s
# *(...) zero-or-more characters
# ends with a ?
continue
elif [[ ${rename} -eq 1 ]]; then
# Make sure the distfile is used
if [[ "${A}" == *"${word}"* ]]; then
echo "Copying ${word} to bazel distdir as ${file}"
ln -s "${DISTDIR}/${word}" "${T}/bazel-distdir/${file}" || die
fi
rename=0
file=""
else
# another URL, current one may or may not be a rename
# if there was a previous one, it's not renamed so copy it now
if [[ -n "${file}" && "${A}" == *"${file}"* ]]; then
echo "Copying ${file} to bazel distdir"
ln -s "${DISTDIR}/${file}" "${T}/bazel-distdir/${file}" || die
fi
# save the current URL, later we will find out if it's a rename or not.
file="${word##*/}"
fi
done
# handle last file
if [[ -n "${file}" ]]; then
echo "Copying ${file} to bazel distdir"
ln -s "${DISTDIR}/${file}" "${T}/bazel-distdir/${file}" || die
fi
}
_BAZEL_ECLASS=1
fi

141
eclass/cannadic.eclass Normal file
View File

@ -0,0 +1,141 @@
# Copyright 1999-2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cannadic.eclass
# @MAINTAINER:
# cjk@gentoo.org
# @AUTHOR:
# Mamoru KOMACHI <usata@gentoo.org>
# @BLURB: Function for Canna compatible dictionaries
# @DESCRIPTION:
# The cannadic eclass is used for installation and setup of Canna
# compatible dictionaries within the Portage system.
inherit eutils
EXPORT_FUNCTIONS pkg_setup pkg_postinst pkg_postrm src_install
HOMEPAGE="http://canna.osdn.jp/" # you need to change this!
SRC_URI="mirror://gentoo/${P}.tar.gz"
DICSDIRFILE="${FILESDIR}/*.dics.dir"
CANNADICS="${CANNADICS}" # (optional)
# You don't need to modify these
CANNADIC_CANNA_DIR="${EROOT:-${ROOT}}"var/lib/canna/dic/canna
CANNADIC_DICS_DIR="${EROOT:-${ROOT}}"var/lib/canna/dic/dics.d
readonly CANNADIC_CANNA_DIR CANNADIC_DICS_DIR
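# Illustrative sketch of a dictionary ebuild using this eclass
# (the dictionary name "foo" is hypothetical):
#
# @CODE
# inherit cannadic
#
# DESCRIPTION="Foo dictionary for Canna"
# CANNADICS="foo"
# @CODE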
# @FUNCTION: cannadic_pkg_setup
# @DESCRIPTION:
# Sets up ${CANNADIC_CANNA_DIR}
cannadic_pkg_setup() {
keepdir "${CANNADIC_CANNA_DIR}"
fowners bin:bin "${CANNADIC_CANNA_DIR}"
fperms 0775 "${CANNADIC_CANNA_DIR}"
}
# @FUNCTION: cannadic-install
# @DESCRIPTION:
# Installs dictionaries to ${CANNADIC_CANNA_DIR}
cannadic-install() {
insinto "${CANNADIC_CANNA_DIR}"
insopts -m 0664 -o bin -g bin
doins "${@}"
}
# @FUNCTION: dicsdir-install
# @DESCRIPTION:
# Installs dics.dir from ${DICSDIRFILE}
dicsdir-install() {
insinto "${CANNADIC_DICS_DIR}"
doins "${DICSDIRFILE}"
}
# @FUNCTION: cannadic_src_install
# @DESCRIPTION:
# Installs all dictionaries under ${WORKDIR}
# plus dics.dir and docs
cannadic_src_install() {
local f
for f in *.c[btl]d *.t; do
if [[ -s "${f}" ]]; then
cannadic-install "${f}"
fi
done 2> /dev/null
dicsdir-install || die
einstalldocs
}
# @FUNCTION: update-cannadic-dir
# @DESCRIPTION:
# Updates dics.dir for Canna Server, script for this part taken from Debian GNU/Linux
#
# compiles dics.dir files for Canna Server
# Copyright 2001 ISHIKAWA Mutsumi
# Licensed under the GNU General Public License, version 2. See the file
# /usr/portage/license/GPL-2 or <http://www.gnu.org/copyleft/gpl.txt>.
update-cannadic-dir() {
einfo
einfo "Updating dics.dir for Canna ..."
einfo
# write new dics.dir file in case we are interrupted
cat <<-EOF > "${CANNADIC_CANNA_DIR}"/dics.dir.update-new
# dics.dir -- automatically generated file by Portage.
# DO NOT EDIT BY HAND.
EOF
local f
for f in "${CANNADIC_DICS_DIR}"/*.dics.dir; do
echo "# ${f}" >> "${CANNADIC_CANNA_DIR}"/dics.dir.update-new
cat "${f}" >> "${CANNADIC_CANNA_DIR}"/dics.dir.update-new
einfo "Added ${f}."
done
mv "${CANNADIC_CANNA_DIR}"/dics.dir.update-new "${CANNADIC_CANNA_DIR}"/dics.dir
einfo
einfo "Done."
einfo
}
# @FUNCTION: cannadic_pkg_postinst
# @DESCRIPTION:
# Updates dics.dir and prints out a notice after install
cannadic_pkg_postinst() {
update-cannadic-dir
einfo
einfo "Please restart cannaserver to fit the changes."
einfo "You need to modify your config file (~/.canna) to enable dictionaries."
if [[ -n "${CANNADICS}" ]]; then
einfo "e.g) add $(for d in ${CANNADICS}; do echo -n "\"${d}\" "; done)to section use-dictionary()."
einfo "For details, see documents under /usr/share/doc/${PF}."
fi
einfo "If you do not have ~/.canna, you can find sample files in /usr/share/canna."
ewarn "If you are upgrading from existing dictionary, you may need to recreate"
ewarn "user dictionary if you have one."
einfo
}
# @FUNCTION: cannadic_pkg_postrm
# @DESCRIPTION:
# Updates dics.dir and prints out a notice after uninstall
cannadic_pkg_postrm() {
update-cannadic-dir
einfo
einfo "Please restart cannaserver to fit changes."
einfo "and modify your config file (~/.canna) to disable dictionary."
if [[ -n "${CANNADICS}" ]]; then
einfo "e.g) delete $(for d in ${CANNADICS}; do echo -n "\"${d}\" "; done)from section use-dictionary()."
fi
einfo
}

397
eclass/cargo.eclass Normal file
View File

@ -0,0 +1,397 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cargo.eclass
# @MAINTAINER:
# rust@gentoo.org
# @AUTHOR:
# Doug Goldstein <cardoe@gentoo.org>
# @SUPPORTED_EAPIS: 6 7
# @BLURB: common functions and variables for cargo builds
if [[ -z ${_CARGO_ECLASS} ]]; then
_CARGO_ECLASS=1
# we need this for 'cargo vendor' subcommand and net.offline config knob
RUST_DEPEND=">=virtual/rust-1.37.0"
case "${EAPI:-0}" in
0|1|2|3|4|5|6)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
inherit multiprocessing toolchain-funcs
if [[ ! ${CARGO_OPTIONAL} ]]; then
BDEPEND="${RUST_DEPEND}"
EXPORT_FUNCTIONS src_unpack src_configure src_compile src_install src_test
fi
IUSE="${IUSE} debug"
ECARGO_HOME="${WORKDIR}/cargo_home"
ECARGO_VENDOR="${ECARGO_HOME}/gentoo"
# @ECLASS-VARIABLE: CARGO_OPTIONAL
# @DEFAULT_UNSET
# @PRE_INHERIT
# @DESCRIPTION:
# If set to a non-null value, before inherit cargo part of the ebuild will
# be considered optional. No dependencies will be added and no phase
# functions will be exported.
#
# If you enable CARGO_OPTIONAL, you have to set BDEPEND on virtual/rust
# for your package and call at least cargo_gen_config manually before using
# other src_ functions of this eclass.
# note that cargo_gen_config is automatically called by cargo_src_unpack.
# @VARIABLE: myfeatures
# @DEFAULT_UNSET
# @DESCRIPTION:
# Optional cargo features defined as bash array.
# Should be defined before calling cargo_src_configure().
#
# Example package that has x11 and wayland as features, and disables default.
# @CODE
# src_configure() {
# local myfeatures=(
# $(usex X x11 '')
# $(usev wayland)
# )
# cargo_src_configure --no-default-features
# }
# @CODE
# @ECLASS-VARIABLE: ECARGO_REGISTRY_DIR
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Storage directory for cargo registry.
# Used by cargo_live_src_unpack to cache downloads.
# This is intended to be set by users.
# Ebuilds must not set it.
#
# Defaults to "${DISTDIR}/cargo-registry" if not set.
# @ECLASS-VARIABLE: ECARGO_OFFLINE
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# If non-empty, this variable prevents online operations in
# cargo_live_src_unpack.
# Inherits value of EVCS_OFFLINE if not set explicitly.
# @ECLASS-VARIABLE: EVCS_UMASK
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set this variable to a custom umask. This is intended to be set by
# users. By setting this to something like 002, it can make life easier
# for people who use cargo in a home directory, but are in the portage
# group, and then switch over to building with FEATURES=userpriv.
# Or vice-versa.
# @FUNCTION: cargo_crate_uris
# @DESCRIPTION:
# Generates the URIs to put in SRC_URI to help fetch dependencies.
cargo_crate_uris() {
local -r regex='^([a-zA-Z0-9_\-]+)-([0-9]+\.[0-9]+\.[0-9]+.*)$'
local crate
for crate in "$@"; do
local name version url
[[ $crate =~ $regex ]] || die "Could not parse name and version from crate: $crate"
name="${BASH_REMATCH[1]}"
version="${BASH_REMATCH[2]}"
url="https://crates.io/api/v1/crates/${name}/${version}/download -> ${crate}.crate"
echo "${url}"
done
}
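# Illustrative sketch (the crate names, versions, and tarball URL are
# examples only):
#
# @CODE
# CRATES="
# libc-0.2.124
# serde-1.0.136
# "
#
# SRC_URI="https://example.org/${P}.tar.gz
# $(cargo_crate_uris ${CRATES})"
# @CODE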
# @FUNCTION: cargo_gen_config
# @DESCRIPTION:
# Generate the $CARGO_HOME/config necessary to use our local registry and settings.
# Cargo can also be configured through environment variables in addition to the TOML syntax below.
# For each configuration key below of the form foo.bar the environment variable CARGO_FOO_BAR
# can also be used to define the value.
# Environment variables will take precedence over TOML configuration,
# and currently only integer, boolean, and string keys are supported.
# For example the build.jobs key can also be defined by CARGO_BUILD_JOBS.
# Or setting CARGO_TERM_VERBOSE=false in make.conf will make the build quieter.
cargo_gen_config() {
debug-print-function ${FUNCNAME} "$@"
mkdir -p "${ECARGO_HOME}" || die
cat > "${ECARGO_HOME}/config" <<- _EOF_ || die "Failed to create cargo config"
[source.gentoo]
directory = "${ECARGO_VENDOR}"
[source.crates-io]
replace-with = "gentoo"
local-registry = "/nonexistant"
[net]
offline = true
[build]
jobs = $(makeopts_jobs)
[term]
verbose = true
$([[ "${NOCOLOR}" = true || "${NOCOLOR}" = yes ]] && echo "color = 'never'")
_EOF_
export CARGO_HOME="${ECARGO_HOME}"
_CARGO_GEN_CONFIG_HAS_RUN=1
}
# @FUNCTION: cargo_src_unpack
# @DESCRIPTION:
# Unpacks the package and the cargo registry
cargo_src_unpack() {
debug-print-function ${FUNCNAME} "$@"
mkdir -p "${ECARGO_VENDOR}" || die
mkdir -p "${S}" || die
local archive shasum pkg
for archive in ${A}; do
case "${archive}" in
*.crate)
ebegin "Loading ${archive} into Cargo registry"
tar -xf "${DISTDIR}"/${archive} -C "${ECARGO_VENDOR}/" || die
# generate sha256sum of the crate itself as cargo needs this
shasum=$(sha256sum "${DISTDIR}"/${archive} | cut -d ' ' -f 1)
pkg=$(basename ${archive} .crate)
cat <<- EOF > ${ECARGO_VENDOR}/${pkg}/.cargo-checksum.json
{
"package": "${shasum}",
"files": {}
}
EOF
# if this is our target package we need it in ${WORKDIR} too
# to make ${S} (and handle any revisions too)
if [[ ${P} == ${pkg}* ]]; then
tar -xf "${DISTDIR}"/${archive} -C "${WORKDIR}" || die
fi
eend $?
;;
cargo-snapshot*)
ebegin "Unpacking ${archive}"
mkdir -p "${S}"/target/snapshot
tar -xzf "${DISTDIR}"/${archive} -C "${S}"/target/snapshot --strip-components 2 || die
# cargo's makefile needs this otherwise it will try to
# download it
touch "${S}"/target/snapshot/bin/cargo || die
eend $?
;;
*)
unpack ${archive}
;;
esac
done
cargo_gen_config
}
# @FUNCTION: cargo_live_src_unpack
# @DESCRIPTION:
# Runs 'cargo fetch' and vendors downloaded crates for offline use, used in live ebuilds
cargo_live_src_unpack() {
debug-print-function ${FUNCNAME} "$@"
[[ "${PV}" == *9999* ]] || die "${FUNCNAME} only allowed in live/9999 ebuilds"
[[ "${EBUILD_PHASE}" == unpack ]] || die "${FUNCNAME} only allowed in src_unpack"
mkdir -p "${S}" || die
mkdir -p "${ECARGO_VENDOR}" || die
mkdir -p "${ECARGO_HOME}" || die
local distdir=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}
: ${ECARGO_REGISTRY_DIR:=${distdir}/cargo-registry}
local offline="${ECARGO_OFFLINE:-${EVCS_OFFLINE}}"
if [[ ! -d ${ECARGO_REGISTRY_DIR} && ! ${offline} ]]; then
(
addwrite "${ECARGO_REGISTRY_DIR}"
mkdir -p "${ECARGO_REGISTRY_DIR}"
) || die "Unable to create ${ECARGO_REGISTRY_DIR}"
fi
if [[ ${offline} ]]; then
local subdir
for subdir in cache index src; do
if [[ ! -d ${ECARGO_REGISTRY_DIR}/registry/${subdir} ]]; then
eerror "Networking activity has been disabled via ECARGO_OFFLINE or EVCS_OFFLINE"
eerror "However, no valid cargo registry available at ${ECARGO_REGISTRY_DIR}"
die "Unable to proceed with ECARGO_OFFLINE/EVCS_OFFLINE."
fi
done
fi
if [[ ${EVCS_UMASK} ]]; then
local saved_umask=$(umask)
umask "${EVCS_UMASK}" || die "Bad options to umask: ${EVCS_UMASK}"
fi
pushd "${S}" > /dev/null || die
# Respect user settings before cargo_gen_config is called.
if [[ ! ${CARGO_TERM_COLOR} ]]; then
[[ "${NOCOLOR}" = true || "${NOCOLOR}" = yes ]] && export CARGO_TERM_COLOR=never
local unset_color=true
fi
if [[ ! ${CARGO_TERM_VERBOSE} ]]; then
export CARGO_TERM_VERBOSE=true
local unset_verbose=true
fi
# Let cargo fetch to system-wide location.
# It will keep directory organized by itself.
addwrite "${ECARGO_REGISTRY_DIR}"
export CARGO_HOME="${ECARGO_REGISTRY_DIR}"
# Absence of quotes around offline arg is intentional, as cargo bails out if it encounters ''
einfo "cargo fetch ${offline:+--offline}"
cargo fetch ${offline:+--offline} || die #nowarn
# Let cargo copy all required crates to "${WORKDIR}" for offline use in later phases.
einfo "cargo vendor ${offline:+--offline} ${ECARGO_VENDOR}"
cargo vendor ${offline:+--offline} "${ECARGO_VENDOR}" || die #nowarn
# Users may have git checkouts made by cargo.
# While cargo vendors the sources, it still needs git checkout to be present.
# Copying full dir is an overkill, so just symlink it.
if [[ -d ${ECARGO_REGISTRY_DIR}/git ]]; then
ln -sv "${ECARGO_REGISTRY_DIR}/git" "${ECARGO_HOME}/git" || die
fi
popd > /dev/null || die
# Restore settings if needed.
[[ ${unset_color} ]] && unset CARGO_TERM_COLOR
[[ ${unset_verbose} ]] && unset CARGO_TERM_VERBOSE
if [[ ${saved_umask} ]]; then
umask "${saved_umask}" || die
fi
# After the following calls, cargo will no longer use ${ECARGO_REGISTRY_DIR} as CARGO_HOME
# It will be forced into offline mode to prevent network access.
# But since we already vendored crates and symlinked git, it has all it needs to build.
unset CARGO_HOME
cargo_gen_config
}
# @FUNCTION: cargo_src_configure
# @DESCRIPTION:
# Configure cargo package features and arguments.
# Extra positional arguments supplied to this function
# will be passed to cargo in all phases.
# Make sure all cargo subcommands support flags passed here.
#
# Example for package that explicitly builds only 'baz' binary and
# enables 'barfeature' and optional 'foo' feature.
# will pass '--features barfeature --features foo --bin baz'
# in src_{compile,test,install}
#
# @CODE
# src_configure() {
# local myfeatures=(
# barfeature
# $(usev foo)
# )
# cargo_src_configure --bin baz
# }
# @CODE
#
# In some cases crates may need the '--no-default-features' option,
# as there is no way to disable a single feature except by disabling them all.
# It can be passed directly to cargo_src_configure().
cargo_src_configure() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${myfeatures} ]] && declare -a myfeatures=()
local myfeaturestype=$(declare -p myfeatures 2>&-)
if [[ "${myfeaturestype}" != "declare -a myfeatures="* ]]; then
die "myfeatures must be declared as array"
fi
# transform array from simple feature list
# to multiple cargo args:
# --features feature1 --features feature2 ...
# this format is chosen because 2 other methods of
# listing features (space OR comma separated) require
# more fiddling with strings we'd like to avoid here.
myfeatures=( ${myfeatures[@]/#/--features } )
readonly ECARGO_ARGS=( ${myfeatures[@]} ${@} ${ECARGO_EXTRA_ARGS} )
[[ ${ECARGO_ARGS[@]} ]] && einfo "Configured with: ${ECARGO_ARGS[@]}"
}
# @FUNCTION: cargo_src_compile
# @DESCRIPTION:
# Build the package using cargo build
cargo_src_compile() {
debug-print-function ${FUNCNAME} "$@"
[[ ${_CARGO_GEN_CONFIG_HAS_RUN} ]] || \
die "FATAL: please call cargo_gen_config before using ${FUNCNAME}"
tc-export AR CC CXX PKG_CONFIG
set -- cargo build $(usex debug "" --release) ${ECARGO_ARGS[@]} "$@"
einfo "${@}"
"${@}" || die "cargo build failed"
}
# @FUNCTION: cargo_src_install
# @DESCRIPTION:
# Installs the binaries generated by cargo
# In some cases workspaces need an alternative --path parameter;
# the default is '--path ./' if nothing is specified.
# '--path ./somedir' can be passed directly to cargo_src_install()
cargo_src_install() {
debug-print-function ${FUNCNAME} "$@"
[[ ${_CARGO_GEN_CONFIG_HAS_RUN} ]] || \
die "FATAL: please call cargo_gen_config before using ${FUNCNAME}"
set -- cargo install $(has --path ${@} || echo --path ./) \
--root "${ED}/usr" \
$(usex debug --debug "") \
${ECARGO_ARGS[@]} "$@"
einfo "${@}"
"${@}" || die "cargo install failed"
rm -f "${ED}/usr/.crates.toml" || die
rm -f "${ED}/usr/.crates2.json" || die
[ -d "${S}/man" ] && doman "${S}/man" || return 0
}
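# Illustrative sketch for a workspace (the member path ./cli is hypothetical):
#
# @CODE
# src_install() {
# cargo_src_install --path ./cli
# }
# @CODE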
# @FUNCTION: cargo_src_test
# @DESCRIPTION:
# Test the package using cargo test
cargo_src_test() {
debug-print-function ${FUNCNAME} "$@"
[[ ${_CARGO_GEN_CONFIG_HAS_RUN} ]] || \
die "FATAL: please call cargo_gen_config before using ${FUNCNAME}"
set -- cargo test $(usex debug "" --release) ${ECARGO_ARGS[@]} "$@"
einfo "${@}"
"${@}" || die "cargo test failed"
}
fi

308
eclass/cdrom.eclass Normal file
View File

@ -0,0 +1,308 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cdrom.eclass
# @MAINTAINER:
# games@gentoo.org
# @BLURB: Functions for CD-ROM handling
# @DESCRIPTION:
# Acquire CD(s) for those lovely CD-based emerges. Yes, this violates
# the whole "non-interactive" policy, but damnit I want CD support!
#
# Do not call these functions in pkg_* phases like pkg_setup as they
# should not be used for binary packages. Most packages using this
# eclass will require RESTRICT="bindist" but the point still stands.
# The functions are generally called in src_unpack.
if [[ -z ${_CDROM_ECLASS} ]]; then
_CDROM_ECLASS=1
inherit portability
# @ECLASS-VARIABLE: CDROM_OPTIONAL
# @DEFAULT_UNSET
# @DESCRIPTION:
# By default, the eclass sets PROPERTIES="interactive" on the assumption
# that people will be using these. If your package optionally supports
# disc-based installs then set this to "yes" and we'll set things
# conditionally based on USE="cdinstall".
if [[ ${CDROM_OPTIONAL} == "yes" ]] ; then
IUSE="cdinstall"
PROPERTIES+=" cdinstall? ( interactive )"
else
PROPERTIES+=" interactive"
fi
# @FUNCTION: cdrom_get_cds
# @USAGE: <cd1 file>[:alt cd1 file] [cd2 file[:alt cd2 file]] [...]
# @DESCRIPTION:
# Attempt to locate a CD based upon a file that is on the CD.
#
# If the data spans multiple discs then additional arguments can be
# given to check for more files. Call cdrom_load_next_cd() to scan for
# the next disc in the set.
#
# Sometimes it is necessary to support alternative CD "sets" where the
# contents differ. Alternative files for each disc can be appended to
# each argument, separated by the : character. This feature is
# frequently used to support installing from an existing installation.
# Note that after the first disc is detected, the set is locked so
# cdrom_load_next_cd() will only scan for files in that specific set on
# subsequent discs.
#
# The given files can be within named subdirectories. It is not
# necessary to specify different casings of the same filename as
# matching is done case-insensitively. Filenames can include special
# characters such as spaces. Only : is not allowed.
#
# If you don't want each disc to be referred to as "CD #1", "CD #2",
# etc. then you can optionally provide your own names. Set CDROM_NAME
# for a single disc, CDROM_NAMES as an array for multiple discs, or
# individual CDROM_NAME_# variables for each disc starting from 1.
#
# Despite what you may have seen in older ebuilds, it has never been
# possible to provide per-set disc names. This would not make sense as
# all the names are initially displayed before the first disc has been
# detected. As a workaround, you can redefine the name variable(s)
# after the first disc has been detected.
#
# This function ends with a cdrom_load_next_cd() call to scan for the
# first disc. For more details about variables read and written by this
# eclass, see that function's description.
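#
# Illustrative sketch (all file names are hypothetical): a two-disc
# install where the second disc is also accepted in an alternative layout.
#
# @CODE
# src_unpack() {
# cdrom_get_cds setup.exe data2/game.dat:ce/game.dat
# # copy the needed files from disc 1 (${CDROM_ROOT}) here ...
# cdrom_load_next_cd
# # ... then from disc 2 here
# }
# @CODE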
cdrom_get_cds() {
unset CDROM_SET
export CDROM_CURRENT_CD=0
export CDROM_NUM_CDS="${#}"
local i
for i in $(seq ${#}); do
export CDROM_CHECK_${i}="${!i}"
done
# If the user has set CD_ROOT or CD_ROOT_1, don't bother informing
# them about which discs are needed as they presumably already know.
if [[ -n ${CD_ROOT}${CD_ROOT_1} ]] ; then
:
# Single disc info.
elif [[ ${#} -eq 1 ]] ; then
einfo "This ebuild will need the ${CDROM_NAME:-CD for ${PN}}"
echo
einfo "If you do not have the CD, but have the data files"
einfo "mounted somewhere on your filesystem, just export"
einfo "the variable CD_ROOT so that it points to the"
einfo "directory containing the files."
echo
einfo "For example:"
einfo "export CD_ROOT=/mnt/cdrom"
echo
# Multi disc info.
else
_cdrom_set_names
einfo "This package may need access to ${#} CDs."
local cdcnt
for cdcnt in $(seq ${#}); do
local var=CDROM_NAME_${cdcnt}
[[ ! -z ${!var} ]] && einfo " CD ${cdcnt}: ${!var}"
done
echo
einfo "If you do not have the CDs, but have the data files"
einfo "mounted somewhere on your filesystem, just export"
einfo "the following variables so they point to the right place:"
einfo $(printf "CD_ROOT_%d " $(seq ${#}))
echo
einfo "Or, if you have all the files in the same place, or"
einfo "you only have one CD, you can export CD_ROOT"
einfo "and that place will be used as the same data source"
einfo "for all the CDs."
echo
einfo "For example:"
einfo "export CD_ROOT=/mnt/cdrom"
echo
fi
# Scan for the first disc.
cdrom_load_next_cd
}
# @FUNCTION: cdrom_load_next_cd
# @DESCRIPTION:
# If multiple arguments were given to cdrom_get_cds() then you can call
# this function to scan for the next disc. This function is also called
# implicitly to scan for the first disc.
#
# The file(s) given to cdrom_get_cds() are scanned for on any mounted
# filesystem that resembles optical media. If no match is found then
# the user is prompted to insert and mount the disc and press enter to
# rescan. This will loop continuously until a match is found or the
# user aborts with Ctrl+C.
#
# The user can override the scan location by setting CD_ROOT for a
# single disc, CD_ROOT if multiple discs are merged into the same
# directory tree (useful for existing installations), or individual
# CD_ROOT_# variables for each disc starting from 1. If no match is
# found then the function dies with an error as a rescan will not help
# in this instance.
#
# Users wanting to set CD_ROOT or CD_ROOT_# for specific packages
# persistently can do so using Portage's /etc/portage/env feature.
#
# Regardless of which scanning method is used, several variables are set
# by this function for you to use:
#
# CDROM_ROOT: Root path of the detected disc.
# CDROM_MATCH: Path of the matched file, relative to CDROM_ROOT.
# CDROM_ABSMATCH: Absolute path of the matched file.
# CDROM_SET: The matching set number, starting from 0.
#
# The casing of CDROM_MATCH may not be the same as the argument given to
# cdrom_get_cds() as matching is done case-insensitively. You should
# therefore use this variable (or CDROM_ABSMATCH) when performing file
# operations to ensure the file is found. Use newins rather than doins
# to keep the final result consistent and take advantage of Bash
# case-conversion features like ${FOO,,}.
#
# Chances are that you'll need more than just the matched file from each
# disc though. You should not assume the casing of these files either
# but dealing with this goes beyond the scope of this ebuild. For a
# good example, see games-action/descent2-data, which combines advanced
# globbing with advanced tar features to concisely deal with
# case-insensitive matching, case conversion, file moves, and
# conditional exclusion.
#
# Copying directly from a mounted disc using doins/newins will remove
# any read-only permissions but be aware of these when copying to an
# intermediate directory first. Attempting to clean a build directory
# containing read-only files as a non-root user will result in an error.
# If you're using tar as suggested above then you can easily work around
# this with --mode=u+w.
#
# Note that you can only go forwards in the disc list, so make sure you
# only call this function when you're done using the current disc.
#
# If you cd to any location within CDROM_ROOT then remember to leave the
# directory before calling this function again, otherwise the user won't
# be able to unmount the current disc.
cdrom_load_next_cd() {
local showedmsg=0 showjolietmsg=0
unset CDROM_ROOT
((++CDROM_CURRENT_CD))
_cdrom_set_names
while true ; do
local i cdset
: CD_ROOT_${CDROM_CURRENT_CD}
export CDROM_ROOT=${CD_ROOT:-${!_}}
local var="CDROM_CHECK_${CDROM_CURRENT_CD}"
IFS=: read -r -a cdset -d "" <<< "${!var}"
for i in $(seq ${CDROM_SET:-0} ${CDROM_SET:-$((${#cdset[@]} - 1))}); do
local f=${cdset[${i}]} point= node= fs= opts=
if [[ -z ${CDROM_ROOT} ]] ; then
while read point node fs opts ; do
has "${fs}" cd9660 iso9660 udf || continue
point=${point//\040/ }
export CDROM_MATCH=$(_cdrom_glob_match "${point}" "${f}")
[[ -z ${CDROM_MATCH} ]] && continue
export CDROM_ROOT=${point}
done <<< "$(get_mounts)"
else
export CDROM_MATCH=$(_cdrom_glob_match "${CDROM_ROOT}" "${f}")
fi
if [[ -n ${CDROM_MATCH} ]] ; then
export CDROM_ABSMATCH=${CDROM_ROOT}/${CDROM_MATCH}
export CDROM_SET=${i}
break 2
fi
done
# If we get here then we were unable to locate a match. If
# CDROM_ROOT is non-empty then this implies that a CD_ROOT
# variable was given and we should therefore abort immediately.
if [[ -n ${CDROM_ROOT} ]] ; then
die "unable to locate CD #${CDROM_CURRENT_CD} root at ${CDROM_ROOT}"
fi
if [[ ${showedmsg} -eq 0 ]] ; then
if [[ ${CDROM_NUM_CDS} -eq 1 ]] ; then
einfo "Please insert+mount the ${CDROM_NAME:-CD for ${PN}} now !"
else
local var="CDROM_NAME_${CDROM_CURRENT_CD}"
if [[ -z ${!var} ]] ; then
einfo "Please insert+mount CD #${CDROM_CURRENT_CD} for ${PN} now !"
else
einfo "Please insert+mount the ${!var} now !"
fi
fi
showedmsg=1
fi
einfo "Press return to scan for the CD again"
einfo "or hit CTRL+C to abort the emerge."
if [[ ${showjolietmsg} -eq 0 ]] ; then
showjolietmsg=1
else
echo
ewarn "If you are having trouble with the detection"
ewarn "of your CD, it is possible that you do not have"
ewarn "Joliet support enabled in your kernel. Please"
ewarn "check that CONFIG_JOLIET is enabled in your kernel."
fi
read || die "something is screwed with your system"
done
einfo "Found CD #${CDROM_CURRENT_CD} root at ${CDROM_ROOT}"
}
# @FUNCTION: _cdrom_glob_match
# @USAGE: <root directory> <path>
# @INTERNAL
# @DESCRIPTION:
# Locates the given path ($2) within the given root directory ($1)
# case-insensitively and returns the first actual matching path. This
# eclass previously used "find -iname" but it only checked the file
# case-insensitively and not the directories. There is "find -ipath"
# but this does not intelligently skip non-matching paths, making it
# slow. Case-insensitive matching can only be applied to patterns so
# extended globbing is used to turn regular strings into patterns. All
# special characters are escaped so don't worry about breaking this.
_cdrom_glob_match() {
# The following line turns this:
# foo*foo/bar bar/baz/file.zip
#
# Into this:
# ?(foo\*foo)/?(bar\ bar)/?(baz)/?(file\.zip)
#
# This turns every path component into an escaped extended glob
# pattern to allow case-insensitive matching. Globs cannot span
# directories so each component becomes an individual pattern.
local p=\?\($(sed -e 's:[^A-Za-z0-9/]:\\\0:g' -e 's:/:)/?(:g' <<< "$2" || die)\)
(
cd "$1" 2>/dev/null || return
shopt -s extglob nocaseglob nullglob || die
# The first person to make this work without an eval wins a
# cookie. It breaks without it when spaces are present.
eval "ARRAY=( ${p%\?()} )"
echo ${ARRAY[0]}
)
}
# @FUNCTION: _cdrom_set_names
# @INTERNAL
# @DESCRIPTION:
# Populate CDROM_NAME_# variables with the CDROM_NAMES array.
_cdrom_set_names() {
if [[ -n ${CDROM_NAMES} ]] ; then
local i
for i in $(seq ${#CDROM_NAMES[@]}); do
export CDROM_NAME_${i}="${CDROM_NAMES[$((${i} - 1))]}"
done
fi
}
fi

361
eclass/check-reqs.eclass Normal file
View File

@ -0,0 +1,361 @@
# Copyright 2004-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: check-reqs.eclass
# @MAINTAINER:
# QA Team <qa@gentoo.org>
# @AUTHOR:
# Bo Ørsted Andresen <zlin@gentoo.org>
# Original Author: Ciaran McCreesh <ciaranm@gentoo.org>
# @SUPPORTED_EAPIS: 4 5 6 7
# @BLURB: Provides a uniform way of handling ebuilds which have very high build requirements
# @DESCRIPTION:
# This eclass provides a uniform way of handling ebuilds which have very high
# build requirements in terms of memory or disk space. It provides a function
# which should usually be called during pkg_setup().
#
# The chosen action only happens when the system's resources are detected
# correctly and only if they are below the threshold specified by the package.
#
# @CODE
# # need this much memory (does *not* check swap)
# CHECKREQS_MEMORY="256M"
#
# # need this much temporary build space
# CHECKREQS_DISK_BUILD="2G"
#
# # install will need this much space in /usr
# CHECKREQS_DISK_USR="1G"
#
# # install will need this much space in /var
# CHECKREQS_DISK_VAR="1024M"
#
# @CODE
#
# If you don't specify a value for, say, CHECKREQS_MEMORY, then the test is not
# carried out.
#
# These checks should probably mostly work on non-Linux, and they should
# probably degrade gracefully if they don't. Probably.
if [[ ! ${_CHECK_REQS_ECLASS_} ]]; then
# @ECLASS-VARIABLE: CHECKREQS_MEMORY
# @DEFAULT_UNSET
# @DESCRIPTION:
# How much RAM is needed? Eg.: CHECKREQS_MEMORY=15M
# @ECLASS-VARIABLE: CHECKREQS_DISK_BUILD
# @DEFAULT_UNSET
# @DESCRIPTION:
# How much diskspace is needed to build the package? Eg.: CHECKREQS_DISK_BUILD=2T
# @ECLASS-VARIABLE: CHECKREQS_DISK_USR
# @DEFAULT_UNSET
# @DESCRIPTION:
# How much space in /usr is needed to install the package? Eg.: CHECKREQS_DISK_USR=15G
# @ECLASS-VARIABLE: CHECKREQS_DISK_VAR
# @DEFAULT_UNSET
# @DESCRIPTION:
# How much space is needed in /var? Eg.: CHECKREQS_DISK_VAR=3000M
case ${EAPI:-0} in
4|5|6|7) ;;
*) die "${ECLASS}: EAPI=${EAPI:-0} is not supported" ;;
esac
EXPORT_FUNCTIONS pkg_pretend pkg_setup
# Obsolete function executing all the checks and printing out results
check_reqs() {
eerror "Package calling old ${FUNCNAME} function."
eerror "It should call check-reqs_pkg_pretend and check-reqs_pkg_setup."
die "${FUNCNAME} is banned"
}
# @FUNCTION: check-reqs_pkg_setup
# @DESCRIPTION:
# Exported function running the resources checks in pkg_setup phase.
# It should be run in both phases to ensure condition changes between
# pkg_pretend and pkg_setup won't affect the build.
check-reqs_pkg_setup() {
debug-print-function ${FUNCNAME} "$@"
check-reqs_prepare
check-reqs_run
check-reqs_output
}
# @FUNCTION: check-reqs_pkg_pretend
# @DESCRIPTION:
# Exported function running the resources checks in pkg_pretend phase.
check-reqs_pkg_pretend() {
debug-print-function ${FUNCNAME} "$@"
check-reqs_pkg_setup "$@"
}
# @FUNCTION: check-reqs_prepare
# @INTERNAL
# @DESCRIPTION:
# Internal function that checks the variables that should be defined.
check-reqs_prepare() {
debug-print-function ${FUNCNAME} "$@"
if [[ -z ${CHECKREQS_MEMORY} &&
-z ${CHECKREQS_DISK_BUILD} &&
-z ${CHECKREQS_DISK_USR} &&
-z ${CHECKREQS_DISK_VAR} ]]; then
eerror "Set some check-reqs eclass variables if you want to use it."
eerror "If you are user and see this message file a bug against the package."
die "${FUNCNAME}: check-reqs eclass called but not actually used!"
fi
}
# @FUNCTION: check-reqs_run
# @INTERNAL
# @DESCRIPTION:
# Internal function that runs the check based on variable settings.
check-reqs_run() {
debug-print-function ${FUNCNAME} "$@"
# some people are *censored*
unset CHECKREQS_FAILED
if [[ ${MERGE_TYPE} != binary ]]; then
[[ -n ${CHECKREQS_MEMORY} ]] && \
check-reqs_memory \
${CHECKREQS_MEMORY}
[[ -n ${CHECKREQS_DISK_BUILD} ]] && \
check-reqs_disk \
"${T}" \
"${CHECKREQS_DISK_BUILD}"
fi
if [[ ${MERGE_TYPE} != buildonly ]]; then
[[ -n ${CHECKREQS_DISK_USR} ]] && \
check-reqs_disk \
"${EROOT%/}/usr" \
"${CHECKREQS_DISK_USR}"
[[ -n ${CHECKREQS_DISK_VAR} ]] && \
check-reqs_disk \
"${EROOT%/}/var" \
"${CHECKREQS_DISK_VAR}"
fi
}
# @FUNCTION: check-reqs_get_kibibytes
# @INTERNAL
# @DESCRIPTION:
# Internal function that returns number in KiB.
# Returns 1024**2 for 1G or 1024**3 for 1T.
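# For example, "2G" converts to 2 * 1024 * 1024 = 2097152 KiB:
#
# @CODE
# size_kib=$(check-reqs_get_kibibytes 2G)   # -> 2097152
# @CODE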
check-reqs_get_kibibytes() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"
local unit=${1:(-1)}
local size=${1%[GMT]}
case ${unit} in
M) echo $((1024 * size)) ;;
G) echo $((1024 * 1024 * size)) ;;
T) echo $((1024 * 1024 * 1024 * size)) ;;
*)
die "${FUNCNAME}: Unknown unit: ${unit}"
;;
esac
}
# @FUNCTION: check-reqs_get_number
# @INTERNAL
# @DESCRIPTION:
# Internal function that returns the numerical value without the unit.
# Returns "1" for "1G" or "150" for "150T".
check-reqs_get_number() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"
local size=${1%[GMT]}
[[ ${size} == ${1} ]] && die "${FUNCNAME}: Missing unit: ${1}"
echo ${size}
}
# @FUNCTION: check-reqs_get_unit
# @INTERNAL
# @DESCRIPTION:
# Internal function that returns the unit without the numerical value.
# Returns "GiB" for "1G" or "TiB" for "150T".
check-reqs_get_unit() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"
local unit=${1:(-1)}
case ${unit} in
M) echo "MiB" ;;
G) echo "GiB" ;;
T) echo "TiB" ;;
*)
die "${FUNCNAME}: Unknown unit: ${unit}"
;;
esac
}
# @FUNCTION: check-reqs_output
# @INTERNAL
# @DESCRIPTION:
# Internal function that prints the warning and dies if required based on
# the test results.
check-reqs_output() {
debug-print-function ${FUNCNAME} "$@"
local msg="ewarn"
[[ ${EBUILD_PHASE} == "pretend" && -z ${I_KNOW_WHAT_I_AM_DOING} ]] && msg="eerror"
if [[ -n ${CHECKREQS_FAILED} ]]; then
${msg}
${msg} "Space constraints set in the ebuild were not met!"
${msg} "The build will most probably fail, you should enhance the space"
${msg} "as per failed tests."
${msg}
[[ ${EBUILD_PHASE} == "pretend" && -z ${I_KNOW_WHAT_I_AM_DOING} ]] && \
die "Build requirements not met!"
fi
}
# @FUNCTION: check-reqs_memory
# @INTERNAL
# @DESCRIPTION:
# Internal function that checks size of RAM.
check-reqs_memory() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${1} ]] && die "Usage: ${FUNCNAME} [size]"
local size=${1}
local actual_memory
local actual_swap
check-reqs_start_phase \
${size} \
"RAM"
if [[ -r /proc/meminfo ]] ; then
actual_memory=$(awk '/MemTotal/ { print $2 }' /proc/meminfo)
actual_swap=$(awk '/SwapTotal/ { print $2 }' /proc/meminfo)
else
actual_memory=$(sysctl hw.physmem 2>/dev/null)
[[ $? -eq 0 ]] && actual_memory=$(echo "${actual_memory}" \
| sed -e 's/^[^:=]*[:=][[:space:]]*//')
actual_swap=$(sysctl vm.swap_total 2>/dev/null)
[[ $? -eq 0 ]] && actual_swap=$(echo "${actual_swap}" \
| sed -e 's/^[^:=]*[:=][[:space:]]*//')
fi
if [[ -n ${actual_memory} ]] ; then
if [[ ${actual_memory} -ge $(check-reqs_get_kibibytes ${size}) ]] ; then
eend 0
elif [[ -n ${actual_swap} && $((${actual_memory} + ${actual_swap})) \
-ge $(check-reqs_get_kibibytes ${size}) ]] ; then
ewarn "Amount of main memory is insufficient, but amount"
ewarn "of main memory combined with swap is sufficient."
ewarn "Build process may make computer very slow!"
eend 0
else
eend 1
check-reqs_unsatisfied \
${size} \
"RAM"
fi
else
eend 1
ewarn "Couldn't determine amount of memory, skipping..."
fi
}
# @FUNCTION: check-reqs_disk
# @INTERNAL
# @DESCRIPTION:
# Internal function that checks space on the harddrive.
check-reqs_disk() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${2} ]] && die "Usage: ${FUNCNAME} [path] [size]"
local path=${1}
local size=${2}
local space_kbi
check-reqs_start_phase \
${size} \
"disk space at \"${path}\""
space_kbi=$(df -Pk "${1}" 2>/dev/null | awk 'FNR == 2 {print $4}')
if [[ $? == 0 && -n ${space_kbi} ]] ; then
if [[ ${space_kbi} -lt $(check-reqs_get_kibibytes ${size}) ]] ; then
eend 1
check-reqs_unsatisfied \
${size} \
"disk space at \"${path}\""
else
eend 0
fi
else
eend 1
ewarn "Couldn't determine disk space, skipping..."
fi
}
# @FUNCTION: check-reqs_start_phase
# @INTERNAL
# @DESCRIPTION:
# Internal function that informs about a started check.
check-reqs_start_phase() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${2} ]] && die "Usage: ${FUNCNAME} [size] [location]"
local size=${1}
local location=${2}
local sizeunit="$(check-reqs_get_number ${size}) $(check-reqs_get_unit ${size})"
ebegin "Checking for at least ${sizeunit} ${location}"
}
# @FUNCTION: check-reqs_unsatisfied
# @INTERNAL
# @DESCRIPTION:
# Internal function that informs about the check result.
# Output differs between the pretend and setup phases;
# in the pretend phase a failure is fatal.
check-reqs_unsatisfied() {
debug-print-function ${FUNCNAME} "$@"
[[ -z ${2} ]] && die "Usage: ${FUNCNAME} [size] [location]"
local msg="ewarn"
local size=${1}
local location=${2}
local sizeunit="$(check-reqs_get_number ${size}) $(check-reqs_get_unit ${size})"
[[ ${EBUILD_PHASE} == "pretend" && -z ${I_KNOW_WHAT_I_AM_DOING} ]] && msg="eerror"
${msg} "There is NOT at least ${sizeunit} ${location}"
# @ECLASS-VARIABLE: CHECKREQS_FAILED
# @DESCRIPTION:
# @INTERNAL
# If set the checks failed and eclass should abort the build.
# Internal, do not set yourself.
CHECKREQS_FAILED="true"
}
_CHECK_REQS_ECLASS_=1
fi

178
eclass/chromium-2.eclass Normal file
View File

@ -0,0 +1,178 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: chromium-2.eclass
# @MAINTAINER:
# Chromium Herd <chromium@gentoo.org>
# @AUTHOR:
# Mike Gilbert <floppym@gentoo.org>
# @BLURB: Shared functions for chromium and google-chrome
inherit eutils linux-info
if [[ ${PN} == chromium ]]; then
IUSE+=" custom-cflags"
fi
# @FUNCTION: chromium_suid_sandbox_check_kernel_config
# @USAGE:
# @DESCRIPTION:
# Ensures the system kernel supports features needed for SUID sandbox to work.
chromium_suid_sandbox_check_kernel_config() {
has "${EAPI:-0}" 0 1 2 3 && die "EAPI=${EAPI} is not supported"
if [[ "${MERGE_TYPE}" == "source" || "${MERGE_TYPE}" == "binary" ]]; then
# Warn if the kernel does not support features needed for sandboxing.
# Bug #363987.
ERROR_PID_NS="PID_NS is required for sandbox to work"
ERROR_NET_NS="NET_NS is required for sandbox to work"
ERROR_USER_NS="USER_NS is required for sandbox to work"
ERROR_SECCOMP_FILTER="SECCOMP_FILTER is required for sandbox to work"
# Warn if the kernel does not support features needed for the browser to work
# (bug #552576, bug #556286).
ERROR_ADVISE_SYSCALLS="CONFIG_ADVISE_SYSCALLS is required for the renderer (bug #552576)"
ERROR_COMPAT_VDSO="CONFIG_COMPAT_VDSO causes segfaults (bug #556286)"
ERROR_GRKERNSEC="CONFIG_GRKERNSEC breaks sandbox (bug #613668)"
CONFIG_CHECK="~PID_NS ~NET_NS ~SECCOMP_FILTER ~USER_NS ~ADVISE_SYSCALLS ~!COMPAT_VDSO ~!GRKERNSEC"
check_extra_config
fi
}
# @ECLASS-VARIABLE: CHROMIUM_LANGS
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of language packs available for this package.
_chromium_set_l10n_IUSE() {
[[ ${EAPI:-0} == 0 ]] && die "EAPI=${EAPI} is not supported"
local lang
for lang in ${CHROMIUM_LANGS}; do
# Default to enabled since we bundle them anyway.
# USE-expansion will take care of disabling the langs the user has not
# selected via L10N.
IUSE+=" +l10n_${lang}"
done
}
if [[ ${CHROMIUM_LANGS} ]]; then
_chromium_set_l10n_IUSE
fi
# @FUNCTION: chromium_remove_language_paks
# @USAGE:
# @DESCRIPTION:
# Removes pak files from the current directory for languages that the user has
# not selected via the L10N variable.
# Also performs QA checks to ensure CHROMIUM_LANGS has been set correctly.
chromium_remove_language_paks() {
local lang pak
# Look for missing pak files.
for lang in ${CHROMIUM_LANGS}; do
if [[ ! -e ${lang}.pak ]]; then
eqawarn "L10N warning: no .pak file for ${lang} (${lang}.pak not found)"
fi
done
# Bug 588198
rm -f fake-bidi.pak || die
rm -f fake-bidi.pak.info || die
# Look for extra pak files.
# Remove pak files that the user does not want.
for pak in *.pak; do
lang=${pak%.pak}
if [[ ${lang} == en-US ]]; then
continue
fi
if ! has ${lang} ${CHROMIUM_LANGS}; then
eqawarn "L10N warning: no ${lang} in LANGS"
continue
fi
if ! use l10n_${lang}; then
rm "${pak}" || die
rm -f "${pak}.info" || die
fi
done
}
chromium_pkg_die() {
if [[ "${EBUILD_PHASE}" != "compile" ]]; then
return
fi
# Prevent user problems like bug #348235.
if ( shopt -s extglob; is-flagq '-g?(gdb)?([1-9])' ); then
ewarn
ewarn "You have enabled debug info (i.e. -g or -ggdb in your CFLAGS/CXXFLAGS)."
ewarn "This produces very large build files causes the linker to consume large"
ewarn "amounts of memory."
ewarn
ewarn "Please try removing -g{,gdb} before reporting a bug."
ewarn
fi
# ccache often causes bogus compile failures, especially when the cache gets
# corrupted.
if has ccache ${FEATURES}; then
ewarn
ewarn "You have enabled ccache. Please try disabling ccache"
ewarn "before reporting a bug."
ewarn
fi
# No ricer bugs.
if in_iuse custom-cflags && use custom-cflags; then
ewarn
ewarn "You have enabled the custom-cflags USE flag."
ewarn "Please disable it before reporting a bug."
ewarn
fi
# If the system doesn't have enough memory, the compilation is known to
# fail. Print info about memory to recognize this condition.
einfo
einfo "$(grep MemTotal /proc/meminfo)"
einfo "$(grep SwapTotal /proc/meminfo)"
einfo
}
# @VARIABLE: EGYP_CHROMIUM_COMMAND
# @DESCRIPTION:
# Path to the gyp_chromium script.
: ${EGYP_CHROMIUM_COMMAND:=build/gyp_chromium}
# @VARIABLE: EGYP_CHROMIUM_DEPTH
# @DESCRIPTION:
# Depth for egyp_chromium.
: ${EGYP_CHROMIUM_DEPTH:=.}
# @FUNCTION: egyp_chromium
# @USAGE: [gyp arguments]
# @DESCRIPTION:
# Calls EGYP_CHROMIUM_COMMAND with depth EGYP_CHROMIUM_DEPTH and given
# arguments. The full command line is echoed for logging.
egyp_chromium() {
set -- "${EGYP_CHROMIUM_COMMAND}" --depth="${EGYP_CHROMIUM_DEPTH}" "$@"
echo "$@"
"$@"
}
# @FUNCTION: gyp_use
# @USAGE: <USE flag> [GYP flag] [true suffix] [false suffix]
# @DESCRIPTION:
# If USE flag is set, echo -D[GYP flag]=[true suffix].
#
# If USE flag is not set, echo -D[GYP flag]=[false suffix].
#
# [GYP flag] defaults to use_[USE flag] with hyphens converted to underscores.
#
# [true suffix] defaults to 1. [false suffix] defaults to 0.
gyp_use() {
local gypflag="-D${2:-use_${1//-/_}}="
usex "$1" "${gypflag}" "${gypflag}" "${3-1}" "${4-0}"
}
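# Illustrative sketch (the USE flag and the myconf_gyp variable are
# examples only):
#
# @CODE
# myconf_gyp+=" $(gyp_use cups)"
# # USE="cups"  -> -Duse_cups=1
# # USE="-cups" -> -Duse_cups=0
# @CODE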

87
eclass/cmake-multilib.eclass Normal file
View File

@ -0,0 +1,87 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cmake-multilib.eclass
# @MAINTAINER:
# gx86-multilib team <multilib@gentoo.org>
# @AUTHOR:
# Author: Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 6 7
# @BLURB: cmake-utils wrapper for multilib builds
# @DESCRIPTION:
# The cmake-multilib.eclass provides a glue between cmake-utils.eclass(5)
# and multilib-minimal.eclass(5), aiming to provide a convenient way
# to build packages using cmake for multiple ABIs.
#
# Inheriting this eclass sets IUSE and exports default multilib_src_*()
# sub-phases that call cmake-utils phase functions for each ABI enabled.
# The multilib_src_*() functions can be defined in the ebuild just like
# in multilib-minimal, yet they ought to call the appropriate cmake-utils
# phase rather than 'default'.
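#
# An illustrative ebuild sketch (the cmake option shown is hypothetical):
#
# @CODE
# inherit cmake-multilib
#
# IUSE="test"
#
# multilib_src_configure() {
# local mycmakeargs=(
# -DBUILD_TESTING=$(usex test)
# )
# cmake-utils_src_configure
# }
# @CODE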
# @ECLASS-VARIABLE: CMAKE_ECLASS
# @DESCRIPTION:
# Default is "cmake-utils" for compatibility. Specify "cmake" for ebuilds
# that have already been ported from cmake-utils.eclass to cmake.eclass.
: ${CMAKE_ECLASS:=cmake-utils}
case ${EAPI:-0} in
[67]) ;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
if [[ ${CMAKE_IN_SOURCE_BUILD} ]]; then
die "${ECLASS}: multilib support requires out-of-source builds."
fi
case ${CMAKE_ECLASS} in
cmake-utils|cmake) ;;
*)
eerror "Unknown value for \${CMAKE_ECLASS}"
die "Value ${CMAKE_ECLASS} is not supported"
;;
esac
inherit ${CMAKE_ECLASS} multilib-minimal
EXPORT_FUNCTIONS src_configure src_compile src_test src_install
cmake-multilib_src_configure() {
local _cmake_args=( "${@}" )
multilib-minimal_src_configure
}
multilib_src_configure() {
${CMAKE_ECLASS}_src_configure "${_cmake_args[@]}"
}
cmake-multilib_src_compile() {
local _cmake_args=( "${@}" )
multilib-minimal_src_compile
}
multilib_src_compile() {
${CMAKE_ECLASS}_src_compile "${_cmake_args[@]}"
}
cmake-multilib_src_test() {
local _cmake_args=( "${@}" )
multilib-minimal_src_test
}
multilib_src_test() {
${CMAKE_ECLASS}_src_test "${_cmake_args[@]}"
}
cmake-multilib_src_install() {
local _cmake_args=( "${@}" )
multilib-minimal_src_install
}
multilib_src_install() {
${CMAKE_ECLASS}_src_install "${_cmake_args[@]}"
}

845
eclass/cmake-utils.eclass Normal file
View File

@ -0,0 +1,845 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cmake-utils.eclass
# @MAINTAINER:
# kde@gentoo.org
# @AUTHOR:
# Tomáš Chvátal <scarabeus@gentoo.org>
# Maciej Mrozowski <reavertm@gentoo.org>
# (undisclosed contributors)
# Original author: Zephyrus (zephyrus@mirach.it)
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: common ebuild functions for cmake-based packages
# @DESCRIPTION:
# DEPRECATED: This no longer receives any changes. Everyone must port to cmake.eclass.
# The cmake-utils eclass makes creating ebuilds for cmake-based packages much easier.
# It provides all inherited features (DOCS, HTML_DOCS, PATCHES) along with out-of-source
# builds (default), in-source builds and an implementation of the well-known use_enable
# and use_with functions for CMake.
if [[ -z ${_CMAKE_UTILS_ECLASS} ]]; then
_CMAKE_UTILS_ECLASS=1
# @ECLASS-VARIABLE: BUILD_DIR
# @DESCRIPTION:
# Build directory where all cmake processed files should be generated.
# For in-source builds it's fixed to ${CMAKE_USE_DIR}.
# For out-of-source builds it can be overridden; by default it uses
# ${WORKDIR}/${P}_build.
#
# This variable was formerly called CMAKE_BUILD_DIR.
# It is still set under that name for compatibility.
# @ECLASS-VARIABLE: CMAKE_BINARY
# @DESCRIPTION:
# The eclass can use a different cmake binary than the one provided by the system.
: ${CMAKE_BINARY:=cmake}
# @ECLASS-VARIABLE: CMAKE_BUILD_TYPE
# @DESCRIPTION:
# Set to override default CMAKE_BUILD_TYPE. Only useful for packages
# known to make use of "if (CMAKE_BUILD_TYPE MATCHES xxx)".
# If it is going to be set, it needs to be set before invoking cmake-utils_src_configure.
# You usually do *NOT* want nor need to set it, as it pulls in CMake's default
# build-type-specific compiler flags, overriding make.conf.
: ${CMAKE_BUILD_TYPE:=Gentoo}
# @ECLASS-VARIABLE: CMAKE_IN_SOURCE_BUILD
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set to enable in-source build.
# @ECLASS-VARIABLE: CMAKE_MAKEFILE_GENERATOR
# @DEFAULT_UNSET
# @DESCRIPTION:
# Specify a makefile generator to be used by cmake.
# At this point only "emake" and "ninja" are supported.
# In EAPI 7 and above, the default is set to "ninja",
# whereas in EAPIs below 7, it is set to "emake".
# @ECLASS-VARIABLE: CMAKE_MIN_VERSION
# @DESCRIPTION:
# Specify the minimum required CMake version.
: ${CMAKE_MIN_VERSION:=3.9.6}
# @ECLASS-VARIABLE: CMAKE_REMOVE_MODULES
# @DESCRIPTION:
# Do we want to remove anything? yes or whatever else for no
: ${CMAKE_REMOVE_MODULES:=yes}
# @ECLASS-VARIABLE: CMAKE_REMOVE_MODULES_LIST
# @DESCRIPTION:
# Space-separated list of CMake modules that will be removed in $S during src_prepare,
# in order to force packages to use the system version.
: ${CMAKE_REMOVE_MODULES_LIST:=FindBLAS FindLAPACK}
# @ECLASS-VARIABLE: CMAKE_USE_DIR
# @DESCRIPTION:
# Sets the directory where we are working with cmake.
# For example, when an application uses autotools and only one
# plugin needs to be built with cmake.
# By default it uses ${S}.
# @ECLASS-VARIABLE: CMAKE_VERBOSE
# @DESCRIPTION:
# Set to OFF to disable verbose messages during compilation
: ${CMAKE_VERBOSE:=ON}
# @ECLASS-VARIABLE: CMAKE_WARN_UNUSED_CLI
# @DESCRIPTION:
# Warn about variables that are declared on the command line
# but not used. Might give false-positives.
# "no" to disable (default) or anything else to enable.
# @ECLASS-VARIABLE: CMAKE_EXTRA_CACHE_FILE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Specifies an extra cache file to pass to cmake. This is the analog of EXTRA_ECONF
# for econf and is needed to pass TRY_RUN results when cross-compiling.
# Should be set by user in a per-package basis in /etc/portage/package.env.
# @ECLASS-VARIABLE: CMAKE_UTILS_QA_SRC_DIR_READONLY
# @DEFAULT_UNSET
# @DESCRIPTION:
# After running cmake-utils_src_prepare, sets ${S} to read-only. This is
# a user flag and should under _no circumstances_ be set in the ebuild.
# Helps in improving QA of build systems that write to source tree.
case ${EAPI} in
5) : ${CMAKE_WARN_UNUSED_CLI:=no} ;;
6|7) : ${CMAKE_WARN_UNUSED_CLI:=yes} ;;
*) die "EAPI=${EAPI:-0} is not supported" ;;
esac
inherit toolchain-funcs ninja-utils flag-o-matic multiprocessing xdg-utils
case ${EAPI} in
[56])
: ${CMAKE_MAKEFILE_GENERATOR:=emake}
inherit eutils multilib
;;
*)
: ${CMAKE_MAKEFILE_GENERATOR:=ninja}
;;
esac
EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
if [[ ${WANT_CMAKE} ]]; then
if [[ ${EAPI} != [56] ]]; then
die "\${WANT_CMAKE} has been removed and is a no-op now"
else
eqawarn "\${WANT_CMAKE} has been removed and is a no-op now"
fi
fi
[[ ${PREFIX} ]] && die "\${PREFIX} has been removed and is a no-op now"
case ${CMAKE_MAKEFILE_GENERATOR} in
emake)
BDEPEND="sys-devel/make"
;;
ninja)
BDEPEND="dev-util/ninja"
;;
*)
eerror "Unknown value for \${CMAKE_MAKEFILE_GENERATOR}"
die "Value ${CMAKE_MAKEFILE_GENERATOR} is not supported"
;;
esac
if [[ ${PN} != cmake ]]; then
BDEPEND+=" >=dev-util/cmake-${CMAKE_MIN_VERSION}"
fi
case ${EAPI} in
7) ;;
*) DEPEND=" ${BDEPEND}" ;;
esac
# Internal functions used by cmake-utils_use_*
_cmake_use_me_now() {
debug-print-function ${FUNCNAME} "$@"
local arg=$2
[[ ! -z $3 ]] && arg=$3
[[ ${EAPI} == 5 ]] || die "${FUNCNAME[1]} is banned in EAPI 6 and later: use -D$1<related_CMake_variable>=\"\$(usex $2)\" instead"
local uper capitalised x
[[ -z $2 ]] && die "cmake-utils_use-$1 <USE flag> [<flag name>]"
if [[ ! -z $3 ]]; then
# user specified the use name so use it
echo "-D$1$3=$(use $2 && echo ON || echo OFF)"
else
# use all various most used combinations
uper=$(echo ${2} | tr '[:lower:]' '[:upper:]')
capitalised=$(echo ${2} | sed 's/\<\(.\)\([^ ]*\)/\u\1\L\2/g')
for x in $2 $uper $capitalised; do
echo "-D$1$x=$(use $2 && echo ON || echo OFF) "
done
fi
}
_cmake_use_me_now_inverted() {
debug-print-function ${FUNCNAME} "$@"
local arg=$2
[[ ! -z $3 ]] && arg=$3
if [[ ${EAPI} != 5 && "${FUNCNAME[1]}" != cmake-utils_use_find_package ]] ; then
die "${FUNCNAME[1]} is banned in EAPI 6 and later: use -D$1<related_CMake_variable>=\"\$(usex $2)\" instead"
fi
local uper capitalised x
[[ -z $2 ]] && die "cmake-utils_use-$1 <USE flag> [<flag name>]"
if [[ ! -z $3 ]]; then
# user specified the use name so use it
echo "-D$1$3=$(use $2 && echo OFF || echo ON)"
else
# use all various most used combinations
uper=$(echo ${2} | tr '[:lower:]' '[:upper:]')
capitalised=$(echo ${2} | sed 's/\<\(.\)\([^ ]*\)/\u\1\L\2/g')
for x in $2 $uper $capitalised; do
echo "-D$1$x=$(use $2 && echo OFF || echo ON) "
done
fi
}
# Determine using IN or OUT source build
_cmake_check_build_dir() {
: ${CMAKE_USE_DIR:=${S}}
if [[ -n ${CMAKE_IN_SOURCE_BUILD} ]]; then
# we build in source dir
BUILD_DIR="${CMAKE_USE_DIR}"
else
# Respect both the old variable and the new one, depending
# on which one was set by the ebuild.
if [[ ! ${BUILD_DIR} && ${CMAKE_BUILD_DIR} ]]; then
if [[ ${EAPI} != [56] ]]; then
eerror "The CMAKE_BUILD_DIR variable has been renamed to BUILD_DIR."
die "The ebuild must be migrated to BUILD_DIR."
else
eqawarn "The CMAKE_BUILD_DIR variable has been renamed to BUILD_DIR."
eqawarn "Please migrate the ebuild to use the new one."
fi
# In the next call, both variables will be set already
# and we'd have to know which one takes precedence.
_RESPECT_CMAKE_BUILD_DIR=1
fi
if [[ ${_RESPECT_CMAKE_BUILD_DIR} ]]; then
BUILD_DIR=${CMAKE_BUILD_DIR:-${WORKDIR}/${P}_build}
else
: ${BUILD_DIR:=${WORKDIR}/${P}_build}
fi
fi
# Backwards compatibility for getting the value.
[[ ${EAPI} == [56] ]] && CMAKE_BUILD_DIR=${BUILD_DIR}
mkdir -p "${BUILD_DIR}" || die
echo ">>> Working in BUILD_DIR: \"$BUILD_DIR\""
}
# Determine which generator to use
_cmake_generator_to_use() {
local generator_name
case ${CMAKE_MAKEFILE_GENERATOR} in
ninja)
# if ninja is enabled but not installed, the build could fail
# this could happen if ninja is manually enabled (eg. make.conf) but not installed
case ${EAPI} in
5|6)
if ! ROOT=/ has_version dev-util/ninja; then
die "CMAKE_MAKEFILE_GENERATOR is set to ninja, but ninja is not installed. Please install dev-util/ninja or unset CMAKE_MAKEFILE_GENERATOR."
fi
;;
*)
if ! has_version -b dev-util/ninja; then
die "CMAKE_MAKEFILE_GENERATOR is set to ninja, but ninja is not installed. Please install dev-util/ninja or unset CMAKE_MAKEFILE_GENERATOR."
fi
;;
esac
generator_name="Ninja"
;;
emake)
generator_name="Unix Makefiles"
;;
*)
eerror "Unknown value for \${CMAKE_MAKEFILE_GENERATOR}"
die "Value ${CMAKE_MAKEFILE_GENERATOR} is not supported"
;;
esac
echo ${generator_name}
}
# @FUNCTION: cmake_comment_add_subdirectory
# @USAGE: <subdirectory>
# @DESCRIPTION:
# Comment out one or more add_subdirectory calls in CMakeLists.txt in the current directory
cmake_comment_add_subdirectory() {
if [[ -z ${1} ]]; then
die "comment_add_subdirectory must be passed at least one directory name to comment"
fi
if [[ -e "CMakeLists.txt" ]]; then
local d
for d in $@; do
sed -e "/add_subdirectory[[:space:]]*([[:space:]]*${d//\//\\/}[[:space:]]*)/I s/^/#DONOTCOMPILE /" \
-i CMakeLists.txt || die "failed to comment add_subdirectory(${d})"
done
fi
}
# @FUNCTION: comment_add_subdirectory
# @USAGE: <subdirectory>
# @DESCRIPTION:
# Comment out an add_subdirectory call in CMakeLists.txt in the current directory
# Banned in EAPI 6 and later - use cmake_comment_add_subdirectory instead.
comment_add_subdirectory() {
[[ ${EAPI} == 5 ]] || die "comment_add_subdirectory is banned in EAPI 6 and later - use cmake_comment_add_subdirectory instead"
cmake_comment_add_subdirectory "$@"
}
# @FUNCTION: cmake-utils_use_with
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_with. See ebuild(5).
#
# `cmake-utils_use_with foo FOO` echoes -DWITH_FOO=ON if foo is enabled
# and -DWITH_FOO=OFF if it is disabled.
cmake-utils_use_with() { _cmake_use_me_now WITH_ "$@" ; }
# @FUNCTION: cmake-utils_use_enable
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake-utils_use_enable foo FOO` echoes -DENABLE_FOO=ON if foo is enabled
# and -DENABLE_FOO=OFF if it is disabled.
cmake-utils_use_enable() { _cmake_use_me_now ENABLE_ "$@" ; }
# @FUNCTION: cmake-utils_use_find_package
# @USAGE: <USE flag> <package name>
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake-utils_use_find_package foo LibFoo` echoes -DCMAKE_DISABLE_FIND_PACKAGE_LibFoo=OFF
# if foo is enabled and -DCMAKE_DISABLE_FIND_PACKAGE_LibFoo=ON if it is disabled.
# This can be used to make find_package optional.
cmake-utils_use_find_package() {
if [[ ${EAPI} != 5 && "$#" != 2 ]] ; then
die "Usage: cmake-utils_use_find_package <USE flag> <package name>"
fi
_cmake_use_me_now_inverted CMAKE_DISABLE_FIND_PACKAGE_ "$@" ;
}
# @FUNCTION: cmake_use_find_package
# @USAGE: <USE flag> <package name>
# @DESCRIPTION:
# Alias for cmake-utils_use_find_package.
cmake_use_find_package() {
if [[ "$#" != 2 ]] ; then
die "Usage: cmake_use_find_package <USE flag> <package name>"
fi
cmake-utils_use_find_package "$@" ;
}
# @FUNCTION: cmake-utils_use_disable
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on inversion of use_enable. See ebuild(5).
#
# `cmake-utils_use_disable foo FOO` echoes -DDISABLE_FOO=OFF if foo is enabled
# and -DDISABLE_FOO=ON if it is disabled.
cmake-utils_use_disable() { _cmake_use_me_now_inverted DISABLE_ "$@" ; }
# @FUNCTION: cmake-utils_use_no
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_disable. See ebuild(5).
#
# `cmake-utils_use_no foo FOO` echoes -DNO_FOO=OFF if foo is enabled
# and -DNO_FOO=ON if it is disabled.
cmake-utils_use_no() { _cmake_use_me_now_inverted NO_ "$@" ; }
# @FUNCTION: cmake-utils_use_want
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake-utils_use_want foo FOO` echoes -DWANT_FOO=ON if foo is enabled
# and -DWANT_FOO=OFF if it is disabled.
cmake-utils_use_want() { _cmake_use_me_now WANT_ "$@" ; }
# @FUNCTION: cmake-utils_use_build
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake-utils_use_build foo FOO` echoes -DBUILD_FOO=ON if foo is enabled
# and -DBUILD_FOO=OFF if it is disabled.
cmake-utils_use_build() { _cmake_use_me_now BUILD_ "$@" ; }
# @FUNCTION: cmake-utils_use_has
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake-utils_use_has foo FOO` echoes -DHAVE_FOO=ON if foo is enabled
# and -DHAVE_FOO=OFF if it is disabled.
cmake-utils_use_has() { _cmake_use_me_now HAVE_ "$@" ; }
# @FUNCTION: cmake-utils_use_use
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake-utils_use_use foo FOO` echoes -DUSE_FOO=ON if foo is enabled
# and -DUSE_FOO=OFF if it is disabled.
cmake-utils_use_use() { _cmake_use_me_now USE_ "$@" ; }
# @FUNCTION: cmake-utils_use
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake-utils_use foo FOO` echoes -DFOO=ON if foo is enabled
# and -DFOO=OFF if it is disabled.
cmake-utils_use() { _cmake_use_me_now "" "$@" ; }
# @FUNCTION: cmake-utils_useno
# @USAGE: <USE flag> [flag name]
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake-utils_useno foo NOFOO` echoes -DNOFOO=OFF if foo is enabled
# and -DNOFOO=ON if it is disabled.
cmake-utils_useno() { _cmake_use_me_now_inverted "" "$@" ; }
# Internal function for modifying hardcoded definitions.
# Removes dangerous definitions that override Gentoo settings.
_cmake_modify-cmakelists() {
debug-print-function ${FUNCNAME} "$@"
# Only edit the files once
grep -qs "<<< Gentoo configuration >>>" "${CMAKE_USE_DIR}"/CMakeLists.txt && return 0
# Comment out all set (<some_should_be_user_defined_variable> value)
find "${CMAKE_USE_DIR}" -name CMakeLists.txt -exec sed \
-e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_BUILD_TYPE\([[:space:]].*)\|)\)/I{s/^/#_cmake_modify_IGNORE /g}' \
-e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_COLOR_MAKEFILE[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
-e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_INSTALL_PREFIX[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
-e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_VERBOSE_MAKEFILE[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
-i {} + || die "${LINENO}: failed to disable hardcoded settings"
local x
for x in $(find "${CMAKE_USE_DIR}" -name CMakeLists.txt -exec grep -l "^#_cmake_modify_IGNORE" {} +;); do
einfo "Hardcoded definition(s) removed in $(echo "${x}" | cut -c $((${#CMAKE_USE_DIR}+2))-):"
einfo "$(grep -se '^#_cmake_modify_IGNORE' ${x} | cut -c 22-99)"
done
# NOTE Append some useful summary here
cat >> "${CMAKE_USE_DIR}"/CMakeLists.txt <<- _EOF_ || die
MESSAGE(STATUS "<<< Gentoo configuration >>>
Build type \${CMAKE_BUILD_TYPE}
Install path \${CMAKE_INSTALL_PREFIX}
Compiler flags:
C \${CMAKE_C_FLAGS}
C++ \${CMAKE_CXX_FLAGS}
Linker flags:
Executable \${CMAKE_EXE_LINKER_FLAGS}
Module \${CMAKE_MODULE_LINKER_FLAGS}
Shared \${CMAKE_SHARED_LINKER_FLAGS}\n")
_EOF_
}
# temporary function for moving cmake cleanups from src_configure -> src_prepare.
# bug #378850
_cmake_cleanup_cmake() {
: ${CMAKE_USE_DIR:=${S}}
if [[ "${CMAKE_REMOVE_MODULES}" == "yes" ]] ; then
local name
for name in ${CMAKE_REMOVE_MODULES_LIST} ; do
find "${S}" -name ${name}.cmake -exec rm -v {} + || die
done
fi
# check if CMakeLists.txt exists and die if it does not
if [[ ! -e ${CMAKE_USE_DIR}/CMakeLists.txt ]] ; then
eerror "Unable to locate CMakeLists.txt under:"
eerror "\"${CMAKE_USE_DIR}/CMakeLists.txt\""
eerror "Consider not inheriting the cmake eclass."
die "FATAL: Unable to find CMakeLists.txt"
fi
# Remove dangerous things.
_cmake_modify-cmakelists
}
# @FUNCTION: cmake-utils_src_prepare
# @DESCRIPTION:
# Apply ebuild and user patches.
cmake-utils_src_prepare() {
debug-print-function ${FUNCNAME} "$@"
pushd "${S}" > /dev/null || die
if [[ ${EAPI} != 5 ]]; then
default_src_prepare
_cmake_cleanup_cmake
else
debug-print "$FUNCNAME: PATCHES=$PATCHES"
[[ ${PATCHES[@]} ]] && epatch "${PATCHES[@]}"
debug-print "$FUNCNAME: applying user patches"
epatch_user
fi
popd > /dev/null || die
# make ${S} read-only in order to detect broken build-systems
if [[ ${CMAKE_UTILS_QA_SRC_DIR_READONLY} && ! ${CMAKE_IN_SOURCE_BUILD} ]]; then
chmod -R a-w "${S}"
fi
_CMAKE_UTILS_SRC_PREPARE_HAS_RUN=1
}
# @VARIABLE: mycmakeargs
# @DEFAULT_UNSET
# @DESCRIPTION:
# Optional cmake defines as a bash array. Should be defined before calling
# src_configure.
# @CODE
# src_configure() {
# local mycmakeargs=(
# $(cmake-utils_use_with openconnect)
# )
#
# cmake-utils_src_configure
# }
# @CODE
# @FUNCTION: cmake-utils_src_configure
# @DESCRIPTION:
# General function for configuring with cmake. Default behaviour is to start an
# out-of-source build.
cmake-utils_src_configure() {
debug-print-function ${FUNCNAME} "$@"
if [[ ! ${_CMAKE_UTILS_SRC_PREPARE_HAS_RUN} ]]; then
if [[ ${EAPI} != [56] ]]; then
die "FATAL: cmake-utils_src_prepare has not been run"
else
eqawarn "cmake-utils_src_prepare has not been run, please open a bug on https://bugs.gentoo.org/"
fi
fi
[[ ${EAPI} == 5 ]] && _cmake_cleanup_cmake
_cmake_check_build_dir
# Fix xdg collision with sandbox
xdg_environment_reset
# @SEE CMAKE_BUILD_TYPE
if [[ ${CMAKE_BUILD_TYPE} = Gentoo ]]; then
# Handle release builds
if ! has debug ${IUSE//+} || ! use debug; then
local CPPFLAGS=${CPPFLAGS}
append-cppflags -DNDEBUG
fi
fi
# Prepare Gentoo override rules (set valid compiler, append CPPFLAGS etc.)
local build_rules=${BUILD_DIR}/gentoo_rules.cmake
cat > "${build_rules}" <<- _EOF_ || die
SET (CMAKE_ASM_COMPILE_OBJECT "<CMAKE_ASM_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "ASM compile command" FORCE)
SET (CMAKE_ASM-ATT_COMPILE_OBJECT "<CMAKE_ASM-ATT_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c -x assembler <SOURCE>" CACHE STRING "ASM-ATT compile command" FORCE)
SET (CMAKE_ASM-ATT_LINK_FLAGS "-nostdlib" CACHE STRING "ASM-ATT link flags" FORCE)
SET (CMAKE_C_COMPILE_OBJECT "<CMAKE_C_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "C compile command" FORCE)
SET (CMAKE_CXX_COMPILE_OBJECT "<CMAKE_CXX_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "C++ compile command" FORCE)
SET (CMAKE_Fortran_COMPILE_OBJECT "<CMAKE_Fortran_COMPILER> <DEFINES> <INCLUDES> ${FCFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "Fortran compile command" FORCE)
_EOF_
local myCC=$(tc-getCC) myCXX=$(tc-getCXX) myFC=$(tc-getFC)
# !!! IMPORTANT NOTE !!!
# Single slash below is intentional. CMake is weird and wants the
# CMAKE_*_VARIABLES split into two elements: the first one with
# compiler path, and the second one with all command-line options,
# space separated.
local toolchain_file=${BUILD_DIR}/gentoo_toolchain.cmake
cat > ${toolchain_file} <<- _EOF_ || die
SET (CMAKE_ASM_COMPILER "${myCC/ /;}")
SET (CMAKE_ASM-ATT_COMPILER "${myCC/ /;}")
SET (CMAKE_C_COMPILER "${myCC/ /;}")
SET (CMAKE_CXX_COMPILER "${myCXX/ /;}")
SET (CMAKE_Fortran_COMPILER "${myFC/ /;}")
SET (CMAKE_AR $(type -P $(tc-getAR)) CACHE FILEPATH "Archive manager" FORCE)
SET (CMAKE_RANLIB $(type -P $(tc-getRANLIB)) CACHE FILEPATH "Archive index generator" FORCE)
SET (CMAKE_SYSTEM_PROCESSOR "${CHOST%%-*}")
_EOF_
# We are using the C compiler for assembly by default.
local -x ASMFLAGS=${CFLAGS}
local -x PKG_CONFIG=$(tc-getPKG_CONFIG)
if tc-is-cross-compiler; then
local sysname
case "${KERNEL:-linux}" in
Cygwin) sysname="CYGWIN_NT-5.1" ;;
HPUX) sysname="HP-UX" ;;
linux) sysname="Linux" ;;
Winnt)
sysname="Windows"
cat >> "${toolchain_file}" <<- _EOF_ || die
SET (CMAKE_RC_COMPILER $(tc-getRC))
_EOF_
;;
*) sysname="${KERNEL}" ;;
esac
cat >> "${toolchain_file}" <<- _EOF_ || die
SET (CMAKE_SYSTEM_NAME "${sysname}")
_EOF_
if [ "${SYSROOT:-/}" != "/" ] ; then
# When cross-compiling with a sysroot (e.g. with crossdev's emerge wrappers)
# we need to tell cmake to use libs/headers from the sysroot but programs from / only.
cat >> "${toolchain_file}" <<- _EOF_ || die
SET (CMAKE_FIND_ROOT_PATH "${SYSROOT}")
SET (CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
SET (CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
SET (CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
_EOF_
fi
fi
if use prefix-guest; then
cat >> "${build_rules}" <<- _EOF_ || die
# in Prefix we need rpath and must ensure cmake gets our default linker path
# right ... except for Darwin hosts
IF (NOT APPLE)
SET (CMAKE_SKIP_RPATH OFF CACHE BOOL "" FORCE)
SET (CMAKE_PLATFORM_REQUIRED_RUNTIME_PATH "${EPREFIX}/usr/${CHOST}/lib/gcc;${EPREFIX}/usr/${CHOST}/lib;${EPREFIX}/usr/$(get_libdir);${EPREFIX}/$(get_libdir)"
CACHE STRING "" FORCE)
ELSE ()
SET (CMAKE_PREFIX_PATH "${EPREFIX}/usr" CACHE STRING "" FORCE)
SET (CMAKE_MACOSX_RPATH ON CACHE BOOL "" FORCE)
SET (CMAKE_SKIP_BUILD_RPATH OFF CACHE BOOL "" FORCE)
SET (CMAKE_SKIP_RPATH OFF CACHE BOOL "" FORCE)
SET (CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE CACHE BOOL "" FORCE)
ENDIF (NOT APPLE)
_EOF_
fi
# Common configure parameters (invariants)
local common_config=${BUILD_DIR}/gentoo_common_config.cmake
local libdir=$(get_libdir)
cat > "${common_config}" <<- _EOF_ || die
SET (CMAKE_GENTOO_BUILD ON CACHE BOOL "Indicate Gentoo package build")
SET (LIB_SUFFIX ${libdir/lib} CACHE STRING "library path suffix" FORCE)
SET (CMAKE_INSTALL_LIBDIR ${libdir} CACHE PATH "Output directory for libraries")
SET (CMAKE_INSTALL_INFODIR "${EPREFIX}/usr/share/info" CACHE PATH "")
SET (CMAKE_INSTALL_MANDIR "${EPREFIX}/usr/share/man" CACHE PATH "")
SET (CMAKE_USER_MAKE_RULES_OVERRIDE "${build_rules}" CACHE FILEPATH "Gentoo override rules")
_EOF_
# See bug 689410
if [[ "${ARCH}" == riscv ]]; then
echo 'SET (CMAKE_FIND_LIBRARY_CUSTOM_LIB_SUFFIX '"${libdir#lib}"' CACHE STRING "library search suffix" FORCE)' >> "${common_config}" || die
fi
[[ "${NOCOLOR}" = true || "${NOCOLOR}" = yes ]] && echo 'SET (CMAKE_COLOR_MAKEFILE OFF CACHE BOOL "pretty colors during make" FORCE)' >> "${common_config}"
if [[ ${EAPI} != [56] ]]; then
cat >> "${common_config}" <<- _EOF_ || die
SET (CMAKE_INSTALL_DOCDIR "${EPREFIX}/usr/share/doc/${PF}" CACHE PATH "")
SET (BUILD_SHARED_LIBS ON CACHE BOOL "")
_EOF_
fi
# Wipe the default optimization flags out of CMake
if [[ ${CMAKE_BUILD_TYPE} != Gentoo && ${EAPI} != 5 ]]; then
cat >> ${common_config} <<- _EOF_ || die
SET (CMAKE_ASM_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
SET (CMAKE_ASM-ATT_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
SET (CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
SET (CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
SET (CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
SET (CMAKE_EXE_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
SET (CMAKE_MODULE_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
SET (CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
SET (CMAKE_STATIC_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
_EOF_
fi
# Convert mycmakeargs to an array, for backwards compatibility
# Make the array a local variable since <=portage-2.1.6.x does not
# support global arrays (see bug #297255).
local mycmakeargstype=$(declare -p mycmakeargs 2>&-)
if [[ "${mycmakeargstype}" != "declare -a mycmakeargs="* ]]; then
if [[ -n "${mycmakeargstype}" ]] ; then
if [[ ${EAPI} == 5 ]]; then
eqawarn "Declaring mycmakeargs as a variable is deprecated. Please use an array instead."
else
die "Declaring mycmakeargs as a variable is banned in EAPI=${EAPI}. Please use an array instead."
fi
fi
local mycmakeargs_local=(${mycmakeargs})
else
local mycmakeargs_local=("${mycmakeargs[@]}")
fi
if [[ ${CMAKE_WARN_UNUSED_CLI} == no ]] ; then
local warn_unused_cli="--no-warn-unused-cli"
else
local warn_unused_cli=""
fi
# Common configure parameters (overridable)
# NOTE CMAKE_BUILD_TYPE can be only overridden via CMAKE_BUILD_TYPE eclass variable
# No -DCMAKE_BUILD_TYPE=xxx definitions will be in effect.
local cmakeargs=(
${warn_unused_cli}
-C "${common_config}"
-G "$(_cmake_generator_to_use)"
-DCMAKE_INSTALL_PREFIX="${EPREFIX}/usr"
"${mycmakeargs_local[@]}"
-DCMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}"
$([[ ${EAPI} == 5 ]] && echo -DCMAKE_INSTALL_DO_STRIP=OFF)
-DCMAKE_TOOLCHAIN_FILE="${toolchain_file}"
"${MYCMAKEARGS}"
)
if [[ -n "${CMAKE_EXTRA_CACHE_FILE}" ]] ; then
cmakeargs+=( -C "${CMAKE_EXTRA_CACHE_FILE}" )
fi
pushd "${BUILD_DIR}" > /dev/null || die
debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: mycmakeargs is ${mycmakeargs_local[*]}"
echo "${CMAKE_BINARY}" "${cmakeargs[@]}" "${CMAKE_USE_DIR}"
"${CMAKE_BINARY}" "${cmakeargs[@]}" "${CMAKE_USE_DIR}" || die "cmake failed"
popd > /dev/null || die
}
# @FUNCTION: cmake-utils_src_compile
# @DESCRIPTION:
# General function for compiling with cmake.
# Automatically detects the build type. All arguments are passed to emake.
cmake-utils_src_compile() {
debug-print-function ${FUNCNAME} "$@"
cmake-utils_src_make "$@"
}
# @FUNCTION: _cmake_ninja_src_make
# @INTERNAL
# @DESCRIPTION:
# Build the package using ninja generator
_cmake_ninja_src_make() {
debug-print-function ${FUNCNAME} "$@"
[[ -e build.ninja ]] || die "build.ninja not found. Error during configure stage."
eninja "$@"
}
# @FUNCTION: _cmake_emake_src_make
# @INTERNAL
# @DESCRIPTION:
# Build the package using make generator
_cmake_emake_src_make() {
debug-print-function ${FUNCNAME} "$@"
[[ -e Makefile ]] || die "Makefile not found. Error during configure stage."
if [[ "${CMAKE_VERBOSE}" != "OFF" ]]; then
emake VERBOSE=1 "$@" || die
else
emake "$@" || die
fi
}
# @FUNCTION: cmake-utils_src_make
# @DESCRIPTION:
# Function for building the package. Automatically detects the build type.
# All arguments are passed to emake.
cmake-utils_src_make() {
debug-print-function ${FUNCNAME} "$@"
_cmake_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
_cmake_${CMAKE_MAKEFILE_GENERATOR}_src_make "$@"
popd > /dev/null || die
}
# @FUNCTION: cmake-utils_src_test
# @DESCRIPTION:
# Function for testing the package. Automatically detects the build type.
cmake-utils_src_test() {
debug-print-function ${FUNCNAME} "$@"
_cmake_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
[[ -e CTestTestfile.cmake ]] || { echo "No tests found. Skipping."; return 0 ; }
[[ -n ${TEST_VERBOSE} ]] && myctestargs+=( --extra-verbose --output-on-failure )
set -- ctest -j "$(makeopts_jobs)" --test-load "$(makeopts_loadavg)" "${myctestargs[@]}" "$@"
echo "$@" >&2
if "$@" ; then
einfo "Tests succeeded."
popd > /dev/null || die
return 0
else
if [[ -n "${CMAKE_YES_I_WANT_TO_SEE_THE_TEST_LOG}" ]] ; then
# on request from Diego
eerror "Tests failed. Test log ${BUILD_DIR}/Testing/Temporary/LastTest.log follows:"
eerror "--START TEST LOG--------------------------------------------------------------"
cat "${BUILD_DIR}/Testing/Temporary/LastTest.log"
eerror "--END TEST LOG----------------------------------------------------------------"
die "Tests failed."
else
die "Tests failed. When you file a bug, please attach the following file: \n\t${BUILD_DIR}/Testing/Temporary/LastTest.log"
fi
# die might not die due to nonfatal
popd > /dev/null || die
return 1
fi
}
# @FUNCTION: cmake-utils_src_install
# @DESCRIPTION:
# Function for installing the package. Automatically detects the build type.
cmake-utils_src_install() {
debug-print-function ${FUNCNAME} "$@"
_cmake_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
DESTDIR="${D}" ${CMAKE_MAKEFILE_GENERATOR} install "$@" || die "died running ${CMAKE_MAKEFILE_GENERATOR} install"
popd > /dev/null || die
pushd "${S}" > /dev/null || die
einstalldocs
popd > /dev/null || die
}
fi

660
eclass/cmake.eclass Normal file
View File

@ -0,0 +1,660 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cmake.eclass
# @MAINTAINER:
# kde@gentoo.org
# @AUTHOR:
# Tomáš Chvátal <scarabeus@gentoo.org>
# Maciej Mrozowski <reavertm@gentoo.org>
# (undisclosed contributors)
# Original author: Zephyrus (zephyrus@mirach.it)
# @SUPPORTED_EAPIS: 7
# @BLURB: common ebuild functions for cmake-based packages
# @DESCRIPTION:
# The cmake eclass makes creating ebuilds for cmake-based packages much easier.
# It provides all inherited features (DOCS, HTML_DOCS, PATCHES) along with
# out-of-source builds (default), in-source builds and an implementation of the
# well-known use_enable function for CMake.
if [[ -z ${_CMAKE_ECLASS} ]]; then
_CMAKE_ECLASS=1
# @ECLASS-VARIABLE: BUILD_DIR
# @DESCRIPTION:
# Build directory where all cmake processed files should be generated.
# For in-source build it's fixed to ${CMAKE_USE_DIR}.
# For out-of-source build it can be overridden, by default it uses
# ${WORKDIR}/${P}_build.
: ${BUILD_DIR:=${WORKDIR}/${P}_build}
# @ECLASS-VARIABLE: CMAKE_BINARY
# @DESCRIPTION:
# The eclass can use a different cmake binary than the one provided by the system.
: ${CMAKE_BINARY:=cmake}
# @ECLASS-VARIABLE: CMAKE_BUILD_TYPE
# @DESCRIPTION:
# Set to override default CMAKE_BUILD_TYPE. Only useful for packages
# known to make use of "if (CMAKE_BUILD_TYPE MATCHES xxx)".
# If it is to be set, it must be set before invoking cmake_src_configure.
# You usually do *NOT* want or need to set it, as it pulls in CMake's default
# build-type specific compiler flags, overriding make.conf.
: ${CMAKE_BUILD_TYPE:=Gentoo}
# @ECLASS-VARIABLE: CMAKE_IN_SOURCE_BUILD
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set to enable in-source build.
# @ECLASS-VARIABLE: CMAKE_MAKEFILE_GENERATOR
# @DEFAULT_UNSET
# @DESCRIPTION:
# Specify a makefile generator to be used by cmake.
# At this point only "emake" and "ninja" are supported.
# The default is set to "ninja".
: ${CMAKE_MAKEFILE_GENERATOR:=ninja}
# @ECLASS-VARIABLE: CMAKE_REMOVE_MODULES_LIST
# @DESCRIPTION:
# Array of CMake modules that will be removed in $S during src_prepare,
# in order to force packages to use the system version.
# Set to "none" to disable removing modules entirely.
: ${CMAKE_REMOVE_MODULES_LIST:=FindBLAS FindLAPACK}
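# For example (illustrative only), an ebuild that wants to keep the bundled
# FindBLAS module but still have FindLAPACK removed could set, before the inherit:
# @CODE
# CMAKE_REMOVE_MODULES_LIST="FindLAPACK"
# inherit cmake
# @CODE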
# @ECLASS-VARIABLE: CMAKE_USE_DIR
# @DESCRIPTION:
# Sets the directory where we are working with cmake, for example when
# the application uses autotools and only one plugin needs to be built with cmake.
# By default it uses ${S}.
# @ECLASS-VARIABLE: CMAKE_VERBOSE
# @DESCRIPTION:
# Set to OFF to disable verbose messages during compilation
: ${CMAKE_VERBOSE:=ON}
# @ECLASS-VARIABLE: CMAKE_WARN_UNUSED_CLI
# @DESCRIPTION:
# Warn about variables that are declared on the command line
# but not used. Might give false positives.
# Set to "no" to disable; any other value (the default is "yes") enables it.
: ${CMAKE_WARN_UNUSED_CLI:=yes}
# @ECLASS-VARIABLE: CMAKE_EXTRA_CACHE_FILE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Specifies an extra cache file to pass to cmake. This is the analog of EXTRA_ECONF
# for econf and is needed to pass TRY_RUN results when cross-compiling.
# Should be set by the user on a per-package basis in /etc/portage/package.env.
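# A minimal sketch of such a setup (the file names and the package atom below
# are hypothetical):
# @CODE
# # /etc/portage/env/cross-tryrun.conf
# CMAKE_EXTRA_CACHE_FILE="/etc/portage/cmake/tryrun-results.cmake"
#
# # /etc/portage/package.env
# dev-libs/hypothetical-lib cross-tryrun.conf
# @CODE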
# @ECLASS-VARIABLE: CMAKE_QA_SRC_DIR_READONLY
# @DEFAULT_UNSET
# @DESCRIPTION:
# After running cmake_src_prepare, sets ${S} to read-only. This is
# a user flag and should under _no circumstances_ be set in the ebuild.
# Helps in improving QA of build systems that write to source tree.
case ${EAPI} in
7) ;;
*) die "EAPI=${EAPI:-0} is not supported" ;;
esac
inherit toolchain-funcs ninja-utils flag-o-matic multiprocessing xdg-utils
EXPORT_FUNCTIONS src_prepare src_configure src_compile src_test src_install
[[ ${CMAKE_MIN_VERSION} ]] && die "CMAKE_MIN_VERSION is banned; if necessary, set BDEPEND=\">=dev-util/cmake-${CMAKE_MIN_VERSION}\" directly"
[[ ${CMAKE_BUILD_DIR} ]] && die "The ebuild must be migrated to BUILD_DIR"
[[ ${CMAKE_REMOVE_MODULES} ]] && die "CMAKE_REMOVE_MODULES is banned, set CMAKE_REMOVE_MODULES_LIST=\"\" instead"
[[ ${CMAKE_UTILS_QA_SRC_DIR_READONLY} ]] && die "Use CMAKE_QA_SRC_DIR_READONLY instead"
[[ ${WANT_CMAKE} ]] && die "WANT_CMAKE has been removed and is a no-op"
[[ ${PREFIX} ]] && die "PREFIX has been removed and is a no-op"
case ${CMAKE_MAKEFILE_GENERATOR} in
emake)
BDEPEND="sys-devel/make"
;;
ninja)
BDEPEND="dev-util/ninja"
;;
*)
eerror "Unknown value for \${CMAKE_MAKEFILE_GENERATOR}"
die "Value ${CMAKE_MAKEFILE_GENERATOR} is not supported"
;;
esac
if [[ ${PN} != cmake ]]; then
BDEPEND+=" dev-util/cmake"
fi
# @FUNCTION: _cmake_banned_func
# @INTERNAL
# @DESCRIPTION:
# Banned functions are banned.
_cmake_banned_func() {
die "${FUNCNAME[1]} is banned. use -D$1<related_CMake_variable>=\"\$(usex $2)\" instead"
}
# Determine whether to use an in-source or an out-of-source build
_cmake_check_build_dir() {
: ${CMAKE_USE_DIR:=${S}}
if [[ -n ${CMAKE_IN_SOURCE_BUILD} ]]; then
# we build in source dir
BUILD_DIR="${CMAKE_USE_DIR}"
fi
mkdir -p "${BUILD_DIR}" || die
einfo "Working in BUILD_DIR: \"$BUILD_DIR\""
}
# @FUNCTION: cmake_run_in
# @USAGE: <working dir> <run command>
# @DESCRIPTION:
# Set the desired working dir for a function or command.
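# Example (illustrative; the directory and command are arbitrary):
# @CODE
# cmake_run_in "${BUILD_DIR}/tests" ctest --output-on-failure
# @CODE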
cmake_run_in() {
if [[ -z ${2} ]]; then
die "${FUNCNAME[0]} must be passed at least two arguments"
fi
[[ -e ${1} ]] || die "${FUNCNAME[0]}: Nonexistent path: ${1}"
pushd ${1} > /dev/null || die
"${@:2}"
popd > /dev/null || die
}
# @FUNCTION: cmake_comment_add_subdirectory
# @USAGE: <subdirectory>
# @DESCRIPTION:
# Comment out one or more add_subdirectory calls in CMakeLists.txt in the current directory
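# Example (the directory names are illustrative):
# @CODE
# src_prepare() {
# 	cmake_src_prepare
# 	cmake_comment_add_subdirectory tests doc
# }
# @CODE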
cmake_comment_add_subdirectory() {
if [[ -z ${1} ]]; then
die "${FUNCNAME[0]} must be passed at least one directory name to comment"
fi
[[ -e "CMakeLists.txt" ]] || return
local d
for d in $@; do
d=${d//\//\\/}
sed -e "/add_subdirectory[[:space:]]*([[:space:]]*${d}[[:space:]]*)/I s/^/#DONOTCOMPILE /" \
-i CMakeLists.txt || die "failed to comment add_subdirectory(${d})"
done
}
# @FUNCTION: comment_add_subdirectory
# @INTERNAL
# @DESCRIPTION:
# Banned. Use cmake_comment_add_subdirectory instead.
comment_add_subdirectory() {
die "comment_add_subdirectory is banned. Use cmake_comment_add_subdirectory instead"
}
# @FUNCTION: cmake-utils_use_with
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DWITH_FOO=$(usex foo) instead.
cmake-utils_use_with() { _cmake_banned_func WITH_ "$@" ; }
# @FUNCTION: cmake-utils_use_enable
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DENABLE_FOO=$(usex foo) instead.
cmake-utils_use_enable() { _cmake_banned_func ENABLE_ "$@" ; }
# @FUNCTION: cmake_use_find_package
# @USAGE: <USE flag> <package name>
# @DESCRIPTION:
# Based on use_enable. See ebuild(5).
#
# `cmake_use_find_package foo LibFoo` echoes -DCMAKE_DISABLE_FIND_PACKAGE_LibFoo=OFF
# if foo is enabled and -DCMAKE_DISABLE_FIND_PACKAGE_LibFoo=ON if it is disabled.
# This can be used to make find_package optional.
cmake_use_find_package() {
debug-print-function ${FUNCNAME} "$@"
if [[ "$#" != 2 || -z $1 ]] ; then
die "Usage: cmake_use_find_package <USE flag> <package name>"
fi
echo "-DCMAKE_DISABLE_FIND_PACKAGE_$2=$(use $1 && echo OFF || echo ON)"
}
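# A typical (illustrative) way to consume its output in an ebuild, assuming a
# hypothetical "doc" USE flag and the Doxygen package:
# @CODE
# local mycmakeargs=(
# 	$(cmake_use_find_package doc Doxygen)
# )
# @CODE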
# @FUNCTION: cmake-utils_use_disable
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DDISABLE_FOO=$(usex !foo) instead.
cmake-utils_use_disable() { _cmake_banned_func DISABLE_ "$@" ; }
# @FUNCTION: cmake-utils_use_no
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DNO_FOO=$(usex !foo) instead.
cmake-utils_use_no() { _cmake_banned_func NO_ "$@" ; }
# @FUNCTION: cmake-utils_use_want
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DWANT_FOO=$(usex foo) instead.
cmake-utils_use_want() { _cmake_banned_func WANT_ "$@" ; }
# @FUNCTION: cmake-utils_use_build
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DBUILD_FOO=$(usex foo) instead.
cmake-utils_use_build() { _cmake_banned_func BUILD_ "$@" ; }
# @FUNCTION: cmake-utils_use_has
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DHAVE_FOO=$(usex foo) instead.
cmake-utils_use_has() { _cmake_banned_func HAVE_ "$@" ; }
# @FUNCTION: cmake-utils_use_use
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DUSE_FOO=$(usex foo) instead.
cmake-utils_use_use() { _cmake_banned_func USE_ "$@" ; }
# @FUNCTION: cmake-utils_use
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DFOO=$(usex foo) instead.
cmake-utils_use() { _cmake_banned_func "" "$@" ; }
# @FUNCTION: cmake-utils_useno
# @INTERNAL
# @DESCRIPTION:
# Banned. Use -DNOFOO=$(usex !foo) instead.
cmake-utils_useno() { _cmake_banned_func "" "$@" ; }
# Internal function for modifying hardcoded definitions.
# Removes dangerous definitions that override Gentoo settings.
_cmake_modify-cmakelists() {
debug-print-function ${FUNCNAME} "$@"
# Only edit the files once
grep -qs "<<< Gentoo configuration >>>" "${CMAKE_USE_DIR}"/CMakeLists.txt && return 0
# Comment out all set (<some_should_be_user_defined_variable> value)
find "${CMAKE_USE_DIR}" -name CMakeLists.txt -exec sed \
-e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_BUILD_TYPE\([[:space:]].*)\|)\)/I{s/^/#_cmake_modify_IGNORE /g}' \
-e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_COLOR_MAKEFILE[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
-e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_INSTALL_PREFIX[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
-e '/^[[:space:]]*set[[:space:]]*([[:space:]]*CMAKE_VERBOSE_MAKEFILE[[:space:]].*)/I{s/^/#_cmake_modify_IGNORE /g}' \
-i {} + || die "${LINENO}: failed to disable hardcoded settings"
local x
for x in $(find "${CMAKE_USE_DIR}" -name CMakeLists.txt -exec grep -l "^#_cmake_modify_IGNORE" {} +;); do
einfo "Hardcoded definition(s) removed in $(echo "${x}" | cut -c $((${#CMAKE_USE_DIR}+2))-):"
einfo "$(grep -se '^#_cmake_modify_IGNORE' ${x} | cut -c 22-99)"
done
# NOTE Append some useful summary here
cat >> "${CMAKE_USE_DIR}"/CMakeLists.txt <<- _EOF_ || die
message(STATUS "<<< Gentoo configuration >>>
Build type \${CMAKE_BUILD_TYPE}
Install path \${CMAKE_INSTALL_PREFIX}
Compiler flags:
C \${CMAKE_C_FLAGS}
C++ \${CMAKE_CXX_FLAGS}
Linker flags:
Executable \${CMAKE_EXE_LINKER_FLAGS}
Module \${CMAKE_MODULE_LINKER_FLAGS}
Shared \${CMAKE_SHARED_LINKER_FLAGS}\n")
_EOF_
}
# @FUNCTION: cmake_src_prepare
# @DESCRIPTION:
# Apply ebuild and user patches.
cmake_src_prepare() {
debug-print-function ${FUNCNAME} "$@"
# FIXME: workaround from cmake-utils; use current working directory instead, bug #704524
# esp. test with 'special' pkgs like: app-arch/brotli, media-gfx/gmic, net-libs/quiche
pushd "${S}" > /dev/null || die
default_src_prepare
_cmake_check_build_dir
# check if CMakeLists.txt exists and die if it does not
if [[ ! -e ${CMAKE_USE_DIR}/CMakeLists.txt ]] ; then
eerror "Unable to locate CMakeLists.txt under:"
eerror "\"${CMAKE_USE_DIR}/CMakeLists.txt\""
eerror "Consider not inheriting the cmake eclass."
die "FATAL: Unable to find CMakeLists.txt"
fi
# if ninja is enabled but not installed, the build could fail
# this could happen if ninja is manually enabled (eg. make.conf) but not installed
if [[ ${CMAKE_MAKEFILE_GENERATOR} == ninja ]] && ! has_version -b dev-util/ninja; then
eerror "CMAKE_MAKEFILE_GENERATOR is set to ninja, but ninja is not installed."
die "Please install dev-util/ninja or unset CMAKE_MAKEFILE_GENERATOR."
fi
local modules_list
if [[ $(declare -p CMAKE_REMOVE_MODULES_LIST) == "declare -a"* ]]; then
modules_list=( "${CMAKE_REMOVE_MODULES_LIST[@]}" )
else
modules_list=( ${CMAKE_REMOVE_MODULES_LIST} )
fi
local name
for name in "${modules_list[@]}" ; do
find "${S}" -name ${name}.cmake -exec rm -v {} + || die
done
# Remove dangerous things.
_cmake_modify-cmakelists
popd > /dev/null || die
# make ${S} read-only in order to detect broken build-systems
if [[ ${CMAKE_QA_SRC_DIR_READONLY} && ! ${CMAKE_IN_SOURCE_BUILD} ]]; then
chmod -R a-w "${S}"
fi
_CMAKE_SRC_PREPARE_HAS_RUN=1
}
# @VARIABLE: mycmakeargs
# @DEFAULT_UNSET
# @DESCRIPTION:
# Optional cmake defines as a bash array. Should be defined before calling
# src_configure.
# @CODE
# src_configure() {
# local mycmakeargs=(
# 		-DWITH_OPENCONNECT=$(usex openconnect)
# )
#
# cmake_src_configure
# }
# @CODE
# @FUNCTION: cmake_src_configure
# @DESCRIPTION:
# General function for configuring with cmake. Default behaviour is to start an
# out-of-source build.
cmake_src_configure() {
debug-print-function ${FUNCNAME} "$@"
[[ ${_CMAKE_SRC_PREPARE_HAS_RUN} ]] || \
die "FATAL: cmake_src_prepare has not been run"
_cmake_check_build_dir
# Fix xdg collision with sandbox
xdg_environment_reset
# Prepare Gentoo override rules (set valid compiler, append CPPFLAGS etc.)
local build_rules=${BUILD_DIR}/gentoo_rules.cmake
cat > "${build_rules}" <<- _EOF_ || die
set(CMAKE_ASM_COMPILE_OBJECT "<CMAKE_ASM_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "ASM compile command" FORCE)
set(CMAKE_ASM-ATT_COMPILE_OBJECT "<CMAKE_ASM-ATT_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c -x assembler <SOURCE>" CACHE STRING "ASM-ATT compile command" FORCE)
set(CMAKE_ASM-ATT_LINK_FLAGS "-nostdlib" CACHE STRING "ASM-ATT link flags" FORCE)
set(CMAKE_C_COMPILE_OBJECT "<CMAKE_C_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "C compile command" FORCE)
set(CMAKE_CXX_COMPILE_OBJECT "<CMAKE_CXX_COMPILER> <DEFINES> <INCLUDES> ${CPPFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "C++ compile command" FORCE)
set(CMAKE_Fortran_COMPILE_OBJECT "<CMAKE_Fortran_COMPILER> <DEFINES> <INCLUDES> ${FCFLAGS} <FLAGS> -o <OBJECT> -c <SOURCE>" CACHE STRING "Fortran compile command" FORCE)
_EOF_
local myCC=$(tc-getCC) myCXX=$(tc-getCXX) myFC=$(tc-getFC)
# !!! IMPORTANT NOTE !!!
# Single slash below is intentional. CMake is weird and wants the
# CMAKE_*_VARIABLES split into two elements: the first one with
# compiler path, and the second one with all command-line options,
# space separated.
local toolchain_file=${BUILD_DIR}/gentoo_toolchain.cmake
cat > ${toolchain_file} <<- _EOF_ || die
set(CMAKE_ASM_COMPILER "${myCC/ /;}")
set(CMAKE_ASM-ATT_COMPILER "${myCC/ /;}")
set(CMAKE_C_COMPILER "${myCC/ /;}")
set(CMAKE_CXX_COMPILER "${myCXX/ /;}")
set(CMAKE_Fortran_COMPILER "${myFC/ /;}")
set(CMAKE_AR $(type -P $(tc-getAR)) CACHE FILEPATH "Archive manager" FORCE)
set(CMAKE_RANLIB $(type -P $(tc-getRANLIB)) CACHE FILEPATH "Archive index generator" FORCE)
set(CMAKE_SYSTEM_PROCESSOR "${CHOST%%-*}")
_EOF_
# We are using the C compiler for assembly by default.
local -x ASMFLAGS=${CFLAGS}
local -x PKG_CONFIG=$(tc-getPKG_CONFIG)
if tc-is-cross-compiler; then
local sysname
case "${KERNEL:-linux}" in
Cygwin) sysname="CYGWIN_NT-5.1" ;;
HPUX) sysname="HP-UX" ;;
linux) sysname="Linux" ;;
Winnt)
sysname="Windows"
cat >> "${toolchain_file}" <<- _EOF_ || die
set(CMAKE_RC_COMPILER $(tc-getRC))
_EOF_
;;
*) sysname="${KERNEL}" ;;
esac
cat >> "${toolchain_file}" <<- _EOF_ || die
set(CMAKE_SYSTEM_NAME "${sysname}")
_EOF_
if [ "${SYSROOT:-/}" != "/" ] ; then
# When cross-compiling with a sysroot (e.g. with crossdev's emerge wrappers)
# we need to tell cmake to use libs/headers from the sysroot but programs from / only.
cat >> "${toolchain_file}" <<- _EOF_ || die
set(CMAKE_FIND_ROOT_PATH "${SYSROOT}")
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
_EOF_
fi
fi
if use prefix-guest; then
cat >> "${build_rules}" <<- _EOF_ || die
# in Prefix we need rpath and must ensure cmake gets our default linker path
# right ... except for Darwin hosts
if(NOT APPLE)
set(CMAKE_SKIP_RPATH OFF CACHE BOOL "" FORCE)
set(CMAKE_PLATFORM_REQUIRED_RUNTIME_PATH "${EPREFIX}/usr/${CHOST}/lib/gcc;${EPREFIX}/usr/${CHOST}/lib;${EPREFIX}/usr/$(get_libdir);${EPREFIX}/$(get_libdir)" CACHE STRING "" FORCE)
else()
set(CMAKE_PREFIX_PATH "${EPREFIX}/usr" CACHE STRING "" FORCE)
set(CMAKE_MACOSX_RPATH ON CACHE BOOL "" FORCE)
set(CMAKE_SKIP_BUILD_RPATH OFF CACHE BOOL "" FORCE)
set(CMAKE_SKIP_RPATH OFF CACHE BOOL "" FORCE)
set(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE CACHE BOOL "" FORCE)
endif()
_EOF_
fi
# Common configure parameters (invariants)
local common_config=${BUILD_DIR}/gentoo_common_config.cmake
local libdir=$(get_libdir)
cat > "${common_config}" <<- _EOF_ || die
set(CMAKE_GENTOO_BUILD ON CACHE BOOL "Indicate Gentoo package build")
set(LIB_SUFFIX ${libdir/lib} CACHE STRING "library path suffix" FORCE)
set(CMAKE_INSTALL_LIBDIR ${libdir} CACHE PATH "Output directory for libraries")
set(CMAKE_INSTALL_INFODIR "${EPREFIX}/usr/share/info" CACHE PATH "")
set(CMAKE_INSTALL_MANDIR "${EPREFIX}/usr/share/man" CACHE PATH "")
set(CMAKE_USER_MAKE_RULES_OVERRIDE "${build_rules}" CACHE FILEPATH "Gentoo override rules")
set(CMAKE_INSTALL_DOCDIR "${EPREFIX}/usr/share/doc/${PF}" CACHE PATH "")
set(BUILD_SHARED_LIBS ON CACHE BOOL "")
_EOF_
if [[ -n ${_ECM_ECLASS} ]]; then
echo 'set(ECM_DISABLE_QMLPLUGINDUMP ON CACHE BOOL "")' >> "${common_config}" || die
fi
# See bug 689410
if [[ "${ARCH}" == riscv ]]; then
echo 'set(CMAKE_FIND_LIBRARY_CUSTOM_LIB_SUFFIX '"${libdir#lib}"' CACHE STRING "library search suffix" FORCE)' >> "${common_config}" || die
fi
if [[ "${NOCOLOR}" = true || "${NOCOLOR}" = yes ]]; then
echo 'set(CMAKE_COLOR_MAKEFILE OFF CACHE BOOL "pretty colors during make" FORCE)' >> "${common_config}" || die
fi
# Wipe the default optimization flags out of CMake
if [[ ${CMAKE_BUILD_TYPE} != Gentoo ]]; then
cat >> ${common_config} <<- _EOF_ || die
set(CMAKE_ASM_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
set(CMAKE_ASM-ATT_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
set(CMAKE_C_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
set(CMAKE_CXX_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
set(CMAKE_Fortran_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
set(CMAKE_EXE_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
set(CMAKE_MODULE_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
set(CMAKE_SHARED_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
set(CMAKE_STATIC_LINKER_FLAGS_${CMAKE_BUILD_TYPE^^} "" CACHE STRING "")
_EOF_
fi
# Make the array a local variable since <=portage-2.1.6.x does not support
# global arrays (see bug #297255). But first make sure it is initialised.
[[ -z ${mycmakeargs} ]] && declare -a mycmakeargs=()
local mycmakeargstype=$(declare -p mycmakeargs 2>&-)
if [[ "${mycmakeargstype}" != "declare -a mycmakeargs="* ]]; then
die "mycmakeargs must be declared as array"
fi
local mycmakeargs_local=( "${mycmakeargs[@]}" )
local warn_unused_cli=""
if [[ ${CMAKE_WARN_UNUSED_CLI} == no ]] ; then
warn_unused_cli="--no-warn-unused-cli"
fi
local generator_name
case ${CMAKE_MAKEFILE_GENERATOR} in
ninja) generator_name="Ninja" ;;
emake) generator_name="Unix Makefiles" ;;
esac
# Common configure parameters (overridable)
# NOTE CMAKE_BUILD_TYPE can be only overridden via CMAKE_BUILD_TYPE eclass variable
# No -DCMAKE_BUILD_TYPE=xxx definitions will be in effect.
local cmakeargs=(
${warn_unused_cli}
-C "${common_config}"
-G "${generator_name}"
-DCMAKE_INSTALL_PREFIX="${EPREFIX}/usr"
"${mycmakeargs_local[@]}"
-DCMAKE_BUILD_TYPE="${CMAKE_BUILD_TYPE}"
-DCMAKE_TOOLCHAIN_FILE="${toolchain_file}"
"${MYCMAKEARGS}"
)
if [[ -n "${CMAKE_EXTRA_CACHE_FILE}" ]] ; then
cmakeargs+=( -C "${CMAKE_EXTRA_CACHE_FILE}" )
fi
pushd "${BUILD_DIR}" > /dev/null || die
debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: mycmakeargs is ${mycmakeargs_local[*]}"
echo "${CMAKE_BINARY}" "${cmakeargs[@]}" "${CMAKE_USE_DIR}"
"${CMAKE_BINARY}" "${cmakeargs[@]}" "${CMAKE_USE_DIR}" || die "cmake failed"
popd > /dev/null || die
}
# @FUNCTION: cmake_src_compile
# @DESCRIPTION:
# General function for compiling with cmake.
# Automatically detects the build type. All arguments are passed to emake.
cmake_src_compile() {
debug-print-function ${FUNCNAME} "$@"
cmake_build "$@"
}
# @FUNCTION: cmake_build
# @DESCRIPTION:
# Function for building the package. Automatically detects the build type.
# All arguments are passed to emake.
cmake_build() {
debug-print-function ${FUNCNAME} "$@"
_cmake_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
case ${CMAKE_MAKEFILE_GENERATOR} in
emake)
[[ -e Makefile ]] || die "Makefile not found. Error during configure stage."
case ${CMAKE_VERBOSE} in
OFF) emake "$@" ;;
*) emake VERBOSE=1 "$@" ;;
esac
;;
ninja)
[[ -e build.ninja ]] || die "build.ninja not found. Error during configure stage."
eninja "$@"
;;
esac
popd > /dev/null || die
}
# @FUNCTION: cmake-utils_src_make
# @INTERNAL
# @DESCRIPTION:
# Banned. Use cmake_build instead.
cmake-utils_src_make() {
die "cmake-utils_src_make is banned. Use cmake_build instead"
}
# @FUNCTION: cmake_src_test
# @DESCRIPTION:
# Function for testing the package. Automatically detects the build type.
cmake_src_test() {
debug-print-function ${FUNCNAME} "$@"
_cmake_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
[[ -e CTestTestfile.cmake ]] || { echo "No tests found. Skipping."; return 0 ; }
[[ -n ${TEST_VERBOSE} ]] && myctestargs+=( --extra-verbose --output-on-failure )
set -- ctest -j "$(makeopts_jobs)" --test-load "$(makeopts_loadavg)" "${myctestargs[@]}" "$@"
echo "$@" >&2
if "$@" ; then
einfo "Tests succeeded."
popd > /dev/null || die
return 0
else
if [[ -n "${CMAKE_YES_I_WANT_TO_SEE_THE_TEST_LOG}" ]] ; then
# on request from Diego
eerror "Tests failed. Test log ${BUILD_DIR}/Testing/Temporary/LastTest.log follows:"
eerror "--START TEST LOG--------------------------------------------------------------"
cat "${BUILD_DIR}/Testing/Temporary/LastTest.log"
eerror "--END TEST LOG----------------------------------------------------------------"
die "Tests failed."
else
die "Tests failed. When you file a bug, please attach the following file: \n\t${BUILD_DIR}/Testing/Temporary/LastTest.log"
fi
# die might not die due to nonfatal
popd > /dev/null || die
return 1
fi
}
# @FUNCTION: cmake_src_install
# @DESCRIPTION:
# Function for installing the package. Automatically detects the build type.
cmake_src_install() {
debug-print-function ${FUNCNAME} "$@"
_cmake_check_build_dir
pushd "${BUILD_DIR}" > /dev/null || die
DESTDIR="${D}" ${CMAKE_MAKEFILE_GENERATOR} install "$@" ||
die "died running ${CMAKE_MAKEFILE_GENERATOR} install"
popd > /dev/null || die
pushd "${S}" > /dev/null || die
einstalldocs
popd > /dev/null || die
}
fi

236
eclass/common-lisp-3.eclass Normal file
View File

@ -0,0 +1,236 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: common-lisp-3.eclass
# @MAINTAINER:
# Common Lisp project <common-lisp@gentoo.org>
# @BLURB: functions to support the installation of Common Lisp libraries
# @DESCRIPTION:
# Since Common Lisp libraries share similar structure, this eclass aims
# to provide a simple way to write ebuilds with these characteristics.
inherit eutils
# @ECLASS-VARIABLE: CLIMPLEMENTATIONS
# @DESCRIPTION:
# Common Lisp implementations
CLIMPLEMENTATIONS="sbcl clisp clozurecl cmucl ecls gcl abcl"
# @ECLASS-VARIABLE: CLSOURCEROOT
# @DESCRIPTION:
# Default path of Common Lisp library sources. Sources will
# be installed into ${CLSOURCEROOT}/${CLPACKAGE}.
CLSOURCEROOT="${ROOT%/}"/usr/share/common-lisp/source
# @ECLASS-VARIABLE: CLSYSTEMROOT
# @DESCRIPTION:
# Default path to find any asdf file. Any asdf files will be
# symlinked in ${CLSYSTEMROOT}/${CLSYSTEM} as they may be in
# an arbitrarily deeply nested directory under ${CLSOURCEROOT}/${CLPACKAGE}.
CLSYSTEMROOT="${ROOT%/}"/usr/share/common-lisp/systems
# @ECLASS-VARIABLE: CLPACKAGE
# @DESCRIPTION:
# Default package name. To override, set these after inheriting this eclass.
CLPACKAGE="${PN}"
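# For example (the package name is illustrative), an ebuild may override it
# after the inherit:
# @CODE
# inherit common-lisp-3
# CLPACKAGE="cl-hypothetical"
# @CODE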
PDEPEND="virtual/commonlisp"
EXPORT_FUNCTIONS src_compile src_install
# @FUNCTION: common-lisp-3_src_compile
# @DESCRIPTION:
# Since there's nothing to build in most cases, default doesn't do
# anything.
common-lisp-3_src_compile() {
true;
}
# @FUNCTION: absolute-path-p
# @DESCRIPTION:
# Returns true if ${1} is an absolute path.
absolute-path-p() {
[[ $# -eq 1 ]] || die "${FUNCNAME[0]} must receive one argument"
[[ ${1} == /* ]]
}
# @FUNCTION: common-lisp-install-one-source
# @DESCRIPTION:
# Installs the source file ${2} into ${3} inside CLSOURCEROOT/CLPACKAGE,
# provided it satisfies the file predicate ${1}.
common-lisp-install-one-source() {
[[ $# -eq 3 ]] || die "${FUNCNAME[0]} must receive exactly three arguments"
local fpredicate=${1}
local source=${2}
local target="${CLSOURCEROOT}/${CLPACKAGE}/${3}"
if absolute-path-p "${source}" ; then
die "Cannot install files with absolute path: ${source}"
fi
if ${fpredicate} "${source}" ; then
insinto "${target}"
doins "${source}" || die "Failed to install ${source} into $(dirname "${target}")"
fi
}
# @FUNCTION: lisp-file-p
# @USAGE: <file>
# @DESCRIPTION:
# Returns true if ${1} is a Lisp source file.
lisp-file-p() {
[[ $# -eq 1 ]] || die "${FUNCNAME[0]} must receive one argument"
[[ ${1} =~ \.(lisp|lsp|cl)$ ]]
}
# @FUNCTION: common-lisp-get-fpredicate
# @USAGE: <type>
# @DESCRIPTION:
# Outputs the corresponding predicate to check files of type ${1}.
common-lisp-get-fpredicate() {
[[ $# -eq 1 ]] || die "${FUNCNAME[0]} must receive one argument"
local ftype=${1}
case ${ftype} in
"lisp") echo "lisp-file-p" ;;
"all" ) echo "true" ;;
* ) die "Unknown filetype specifier ${ftype}" ;;
esac
}
# @FUNCTION: common-lisp-install-sources
# @USAGE: <path> [...]
# @DESCRIPTION:
# Recursively install Lisp sources. If ${1} is -t, only files of type ${2}
# are installed; by default only Lisp source files are installed. When given
# a directory, it will be recursively scanned for Lisp source files with the
# suffixes .lisp, .lsp or .cl.
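# Example (the paths are illustrative):
# @CODE
# common-lisp-install-sources src extra/util.lisp
# common-lisp-install-sources -t all src
# @CODE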
common-lisp-install-sources() {
local ftype="lisp"
if [[ ${1} == "-t" ]] ; then
ftype=${2}
shift ; shift
fi
[[ $# -ge 1 ]] || die "${FUNCNAME[0]} must receive one non-option argument"
local fpredicate=$(common-lisp-get-fpredicate "${ftype}")
for path in "${@}" ; do
if [[ -f ${path} ]] ; then
common-lisp-install-one-source ${fpredicate} "${path}" "$(dirname "${path}")"
elif [[ -d ${path} ]] ; then
common-lisp-install-sources -t ${ftype} $(find "${path}" -type f)
else
die "${path} is neither a regular file nor a directory"
fi
done
}
# @FUNCTION: common-lisp-install-one-asdf
# @USAGE: <file>
# @DESCRIPTION:
# Installs the asdf file ${1} in CLSOURCEROOT/CLPACKAGE and symlinks it in
# CLSYSTEMROOT.
common-lisp-install-one-asdf() {
[[ $# != 1 ]] && die "${FUNCNAME[0]} must receive exactly one argument"
# the suffix «.asd» is optional
local source=${1/.asd}.asd
common-lisp-install-one-source true "${source}" "$(dirname "${source}")"
local target="${CLSOURCEROOT%/}/${CLPACKAGE}/${source}"
dosym "${target}" "${CLSYSTEMROOT%/}/$(basename ${target})"
}
# @FUNCTION: common-lisp-install-asdf
# @USAGE: <path> [...]
# @DESCRIPTION:
# Installs all ASDF files and creates symlinks in CLSYSTEMROOT.
# When given a directory, it will be recursively scanned for ASDF
# files with extension .asd.
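# Example (the system names are illustrative):
# @CODE
# common-lisp-install-asdf ${PN}.asd contrib/extras.asd
# @CODE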
common-lisp-install-asdf() {
dodir "${CLSYSTEMROOT}"
[[ $# = 0 ]] && set - ${CLSYSTEMS}
[[ $# = 0 ]] && set - $(find . -type f -name \*.asd)
for sys in "${@}" ; do
common-lisp-install-one-asdf ${sys}
done
}
# @FUNCTION: common-lisp-3_src_install
# @DESCRIPTION:
# Recursively install Lisp sources, asdf files and most common doc files.
common-lisp-3_src_install() {
common-lisp-install-sources .
common-lisp-install-asdf
for i in AUTHORS README* HEADER TODO* CHANGELOG Change[lL]og CHANGES BUGS CONTRIBUTORS *NEWS* ; do
[[ -f ${i} ]] && dodoc ${i}
done
}
# @FUNCTION: common-lisp-find-lisp-impl
# @DESCRIPTION:
# Outputs an installed Common Lisp implementation. Traverses
# CLIMPLEMENTATIONS to find it.
common-lisp-find-lisp-impl() {
for lisp in ${CLIMPLEMENTATIONS} ; do
[[ "$(best_version dev-lisp/${lisp})" ]] && echo "${lisp}" && return
done
die "No CommonLisp implementation found"
}
# @FUNCTION: common-lisp-export-impl-args
# @USAGE: <lisp-implementation>
# @DESCRIPTION:
# Export a few variables containing the switches necessary
# to make the CL implementation perform basic functions:
# * CL_BINARY: Common Lisp implementation
# * CL_NORC: don't load system-wide or user-specific initfiles
# * CL_LOAD: load a certain file
# * CL_EVAL: eval a certain expression at startup
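# A short sketch of how these variables can be used afterwards
# (build.lisp is a hypothetical file):
# @CODE
# common-lisp-export-impl-args "$(common-lisp-find-lisp-impl)"
# ${CL_BINARY} ${CL_NORC} ${CL_LOAD} build.lisp
# @CODE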
common-lisp-export-impl-args() {
if [[ $# != 1 ]]; then
eerror "Usage: ${FUNCNAME[0]} lisp-implementation"
die "${FUNCNAME[0]}: wrong number of arguments: $#"
fi
CL_BINARY="${1}"
case "${CL_BINARY}" in
sbcl)
CL_NORC="--sysinit /dev/null --userinit /dev/null"
CL_LOAD="--load"
CL_EVAL="--eval"
;;
clisp)
CL_NORC="-norc"
CL_LOAD="-i"
CL_EVAL="-x"
;;
clozure | clozurecl | ccl | openmcl)
CL_BINARY="ccl"
CL_NORC="--no-init"
CL_LOAD="--load"
CL_EVAL="--eval"
;;
cmucl)
CL_NORC="-nositeinit -noinit"
CL_LOAD="-load"
CL_EVAL="-eval"
;;
ecl | ecls)
CL_BINARY="ecl"
CL_NORC="-norc"
CL_LOAD="-load"
CL_EVAL="-eval"
;;
abcl)
CL_NORC="--noinit"
CL_LOAD="--load"
CL_EVAL="--eval"
;;
*)
die "${CL_BINARY} is not supported by ${0}"
;;
esac
export CL_BINARY CL_NORC CL_LOAD CL_EVAL
}

160
eclass/cron.eclass Normal file
View File

@ -0,0 +1,160 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cron.eclass
# @MAINTAINER:
# maintainer-needed@gentoo.org
# @AUTHOR:
# Original Author: Aaron Walker <ka0ttic@gentoo.org>
# @BLURB: Some functions for cron
# @DESCRIPTION:
# Purpose: The main motivation for this eclass was to simplify
# the jungle known as src_install() in cron ebuilds. Using these
# functions also ensures that permissions are *always* reset,
# preventing the accidental installation of files with wrong perms.
#
# NOTE on defaults: the default settings in the below functions were
# chosen based on the most common setting among cron ebuilds.
#
# Please assign any bugs regarding this eclass to cron-bugs@gentoo.org.
inherit eutils flag-o-matic
EXPORT_FUNCTIONS pkg_postinst
SLOT="0"
DEPEND=">=sys-apps/sed-4.0.5"
RDEPEND=">=sys-process/cronbase-0.3.2"
for pn in vixie-cron bcron cronie dcron fcron; do
[[ ${pn} == "${PN}" ]] || RDEPEND="${RDEPEND} !sys-process/${pn}"
done
# @FUNCTION: docrondir
# @USAGE: [ dir ] [ perms ]
# @DESCRIPTION:
# Creates crontab directory
#
# Both arguments are optional. Everything after 'dir' is considered
# the permissions (same format as insopts).
#
# ex: docrondir /some/dir -m 0770 -o root -g cron
# docrondir /some/dir (uses default perms)
# docrondir -m0700 (uses default dir)
docrondir() {
# defaults
local perms="-m0750 -o root -g cron" dir="/var/spool/cron/crontabs"
if [[ -n $1 ]] ; then
case "$1" in
*/*)
dir=$1
shift
[[ -n $1 ]] && perms="$@"
;;
*)
perms="$@"
;;
esac
fi
diropts ${perms}
keepdir ${dir}
# reset perms to default
diropts -m0755
}
# @FUNCTION: docron
# @USAGE: [ exe ] [ perms ]
# @DESCRIPTION:
# Install cron executable
#
# Both arguments are optional.
#
# ex: docron -m 0700 -o root -g root ('exe' defaults to "cron")
# docron crond -m 0110
docron() {
local cron="cron" perms="-m 0750 -o root -g wheel"
if [[ -n $1 ]] ; then
case "$1" in
-*)
perms="$@"
;;
*)
cron=$1
shift
[[ -n $1 ]] && perms="$@"
;;
esac
fi
exeopts ${perms}
exeinto /usr/sbin
doexe ${cron} || die "failed to install ${cron}"
# reset perms to default
exeopts -m0755
}
# @FUNCTION: docrontab
# @USAGE: [ exe ] [ perms ]
# @DESCRIPTION:
# Install crontab executable
#
# Uses same semantics as docron.
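# ex (illustrative): docrontab -m 4750 -o root -g cron  ('exe' defaults to "crontab")
#                    docrontab src/crontab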
docrontab() {
local crontab="crontab" perms="-m 4750 -o root -g cron"
if [[ -n $1 ]] ; then
case "$1" in
-*)
perms="$@"
;;
*)
crontab=$1
shift
[[ -n $1 ]] && perms="$@"
;;
esac
fi
exeopts ${perms}
exeinto /usr/bin
doexe ${crontab} || die "failed to install ${crontab}"
# reset perms to default
exeopts -m0755
# users expect /usr/bin/crontab to exist...
if [[ "${crontab##*/}" != "crontab" ]] ; then
dosym ${crontab##*/} /usr/bin/crontab || \
die "failed to create /usr/bin/crontab symlink"
fi
}
# @FUNCTION: cron_pkg_postinst
# @DESCRIPTION:
# Outputs a message about system crontabs.
# Daemons that provide a true system crontab should set CRON_SYSTEM_CRONTAB="yes".
cron_pkg_postinst() {
echo
# daemons that have a true system crontab set CRON_SYSTEM_CRONTAB="yes"
if [ "${CRON_SYSTEM_CRONTAB:-no}" != "yes" ] ; then
einfo "To activate /etc/cron.{hourly|daily|weekly|monthly} please run:"
einfo " crontab /etc/crontab"
einfo
einfo "!!! That will replace root's current crontab !!!"
einfo
fi
einfo "You may wish to read the Gentoo Linux Cron Guide, which can be"
einfo "found online at:"
einfo " https://wiki.gentoo.org/wiki/Cron"
echo
}

201
eclass/cuda.eclass Normal file
View File

@ -0,0 +1,201 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
case "${EAPI:-0}" in
0|1|2|3|4)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
5|6|7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
# @ECLASS: cuda.eclass
# @MAINTAINER:
# Gentoo Science Project <sci@gentoo.org>
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Common functions for cuda packages
# @DESCRIPTION:
# This eclass contains functions to be used with cuda packages. Currently it
# sets and/or sanitizes NVCCFLAGS, the compiler flags for nvcc. This is done
# and exported automatically in src_prepare(), or manually by calling
# cuda_sanitize.
# @EXAMPLE:
# inherit cuda
if [[ -z ${_CUDA_ECLASS} ]]; then
inherit flag-o-matic toolchain-funcs
[[ ${EAPI} == [56] ]] && inherit eapi7-ver
# @ECLASS-VARIABLE: NVCCFLAGS
# @DESCRIPTION:
# nvcc compiler flags (see nvcc --help), which should be used like
# CFLAGS for the C compiler
: ${NVCCFLAGS:=-O2}
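# A user could, for instance, override it in make.conf much like CFLAGS
# (the value shown is only illustrative):
# @CODE
# NVCCFLAGS="-O3"
# @CODE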
# @ECLASS-VARIABLE: CUDA_VERBOSE
# @DESCRIPTION:
# Enable verbose output during compilation to see the underlying nvcc commands
: ${CUDA_VERBOSE:=true}
# @FUNCTION: cuda_gccdir
# @USAGE: [-f]
# @RETURN: gcc bindir compatible with current cuda, optionally (-f) prefixed with "--compiler-bindir "
# @DESCRIPTION:
# Helper for determining the latest gcc bindir supported by
# the currently installed nvidia cuda toolkit.
#
# Example:
# @CODE
# cuda_gccdir -f
# -> --compiler-bindir "/usr/x86_64-pc-linux-gnu/gcc-bin/4.6.3"
# @CODE
cuda_gccdir() {
debug-print-function ${FUNCNAME} "$@"
local dirs gcc_bindir ver vers="" flag
# Currently we only support the gnu compiler suite
if ! tc-is-gcc ; then
ewarn "Currently we only support the gnu compiler suite"
return 2
fi
while [[ "$1" ]]; do
case $1 in
-f)
flag="--compiler-bindir "
;;
*)
;;
esac
shift
done
if ! vers="$(cuda-config -s)"; then
eerror "Could not execute cuda-config"
eerror "Make sure >=dev-util/nvidia-cuda-toolkit-4.2.9-r1 is installed"
die "cuda-config not found"
fi
if [[ -z ${vers} ]]; then
die "Could not determine supported gcc versions from cuda-config"
fi
# Try the current gcc version first
ver=$(gcc-version)
if [[ -n "${ver}" ]] && [[ ${vers} =~ ${ver} ]]; then
dirs=( ${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver}*/ )
gcc_bindir="${dirs[${#dirs[@]}-1]}"
fi
if [[ -z ${gcc_bindir} ]]; then
ver=$(best_version "sys-devel/gcc")
ver=$(ver_cut 1-2 "${ver##*sys-devel/gcc-}")
if [[ -n "${ver}" ]] && [[ ${vers} =~ ${ver} ]]; then
dirs=( ${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver}*/ )
gcc_bindir="${dirs[${#dirs[@]}-1]}"
fi
fi
for ver in ${vers}; do
if has_version "=sys-devel/gcc-${ver}*"; then
dirs=( ${EPREFIX}/usr/*pc-linux-gnu/gcc-bin/${ver}*/ )
gcc_bindir="${dirs[${#dirs[@]}-1]}"
fi
done
if [[ -n ${gcc_bindir} ]]; then
if [[ -n ${flag} ]]; then
echo "${flag}\"${gcc_bindir%/}\""
else
echo "${gcc_bindir%/}"
fi
return 0
else
eerror "Only gcc version(s) ${vers} are supported,"
eerror "of which none is installed"
die "Only gcc version(s) ${vers} are supported"
return 1
fi
}
# @FUNCTION: cuda_sanitize
# @DESCRIPTION:
# Correct NVCCFLAGS by adding the necessary reference to gcc bindir and
# passing CXXFLAGS to underlying compiler without disturbing nvcc.
cuda_sanitize() {
debug-print-function ${FUNCNAME} "$@"
local rawldflags=$(raw-ldflags)
# Be verbose if wanted
[[ "${CUDA_VERBOSE}" == true ]] && NVCCFLAGS+=" -v"
# Tell nvcc where to find a compatible compiler
NVCCFLAGS+=" $(cuda_gccdir -f)"
# Tell nvcc which flags should be used for underlying C compiler
NVCCFLAGS+=" --compiler-options \"${CXXFLAGS}\" --linker-options \"${rawldflags// /,}\""
debug-print "Using ${NVCCFLAGS} for cuda"
export NVCCFLAGS
}
# @FUNCTION: cuda_add_sandbox
# @USAGE: [-w]
# @DESCRIPTION:
# Add nvidia dev nodes to the sandbox predict list.
# With -w, add to the sandbox write list.
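# Example (illustrative):
# @CODE
# src_configure() {
# 	cuda_add_sandbox -w
# 	default
# }
# @CODE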
cuda_add_sandbox() {
debug-print-function ${FUNCNAME} "$@"
local i
for i in /dev/nvidia*; do
if [[ $1 == '-w' ]]; then
addwrite $i
else
addpredict $i
fi
done
}
# @FUNCTION: cuda_toolkit_version
# @DESCRIPTION:
# echo the installed version of dev-util/nvidia-cuda-toolkit
cuda_toolkit_version() {
debug-print-function ${FUNCNAME} "$@"
local v
v="$(best_version dev-util/nvidia-cuda-toolkit)"
v="${v##*cuda-toolkit-}"
ver_cut 1-2 "${v}"
}
# @FUNCTION: cuda_cudnn_version
# @DESCRIPTION:
# echo the installed version of dev-libs/cudnn
cuda_cudnn_version() {
debug-print-function ${FUNCNAME} "$@"
local v
v="$(best_version dev-libs/cudnn)"
v="${v##*cudnn-}"
ver_cut 1-2 "${v}"
}
# @FUNCTION: cuda_src_prepare
# @DESCRIPTION:
# Sanitize and export NVCCFLAGS by default
cuda_src_prepare() {
debug-print-function ${FUNCNAME} "$@"
cuda_sanitize
}
EXPORT_FUNCTIONS src_prepare
_CUDA_ECLASS=1
fi

538
eclass/cvs.eclass Normal file
View File

@ -0,0 +1,538 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: cvs.eclass
# @MAINTAINER:
# vapier@gentoo.org (and anyone who wants to help)
# @SUPPORTED_EAPIS: 4 5 6 7
# @BLURB: This eclass provides generic cvs fetching functions
# @DESCRIPTION:
# This eclass provides the generic cvs fetching functions. To use this from an
# ebuild, set the ECLASS VARIABLES as specified below in your ebuild before
# inheriting. Then either leave the default src_unpack or extend over
# cvs_src_unpack. If you find that you need to call the cvs_* functions
# directly, I'd be interested to hear about it.
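#
# A minimal usage sketch (server and module names are purely illustrative):
#
# @CODE
# ECVS_SERVER="anoncvs.example.org:/cvsroot/foo"
# ECVS_MODULE="foo"
# inherit cvs
# @CODE
#
# The src_unpack exported by this eclass then checks the module out and
# copies it into ${WORKDIR}.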
if [[ -z ${_CVS_ECLASS} ]]; then
_CVS_ECLASS=1
# TODO:
# Implement more auth types (gserver?, kserver?)
# Support additional remote shells with `ext' authentication (does
# anyone actually need to use it with anything other than SSH?)
# Users shouldn't change these settings! The ebuild/eclass inheriting
# this eclass will take care of that. If you want to set the global
# KDE cvs ebuilds' settings, see the comments in kde-source.eclass.
# @ECLASS-VARIABLE: ECVS_CVS_COMPRESS
# @DESCRIPTION:
# Set the default compression level. Has no effect when ECVS_CVS_COMMAND
# is defined by ebuild/user.
: ${ECVS_CVS_COMPRESS:=-z1}
# @ECLASS-VARIABLE: ECVS_CVS_OPTIONS
# @DESCRIPTION:
# Additional options to the cvs commands. Has no effect when ECVS_CVS_COMMAND
# is defined by ebuild/user.
: ${ECVS_CVS_OPTIONS:=-q -f}
# @ECLASS-VARIABLE: ECVS_CVS_COMMAND
# @DESCRIPTION:
# CVS command to run
#
# You can set, for example, "cvs -t" for extensive debug information
# on the cvs connection. The default of "cvs -q -f -z1" means to be
# quiet, to disregard the ~/.cvsrc config file and to use light
# compression.
: ${ECVS_CVS_COMMAND:=cvs ${ECVS_CVS_OPTIONS} ${ECVS_CVS_COMPRESS}}
# @ECLASS-VARIABLE: ECVS_UP_OPTS
# @DESCRIPTION:
# CVS options given after the cvs update command. Don't remove "-dP" or things
# won't work.
: ${ECVS_UP_OPTS:=-dP}
# @ECLASS-VARIABLE: ECVS_CO_OPTS
# @DEFAULT_UNSET
# @DESCRIPTION:
# CVS options given after the cvs checkout command.
# @ECLASS-VARIABLE: ECVS_OFFLINE
# @DESCRIPTION:
# Set this variable to a non-empty value to disable the automatic updating of
# a CVS source tree. This is intended to be set outside the cvs source
# tree by users.
: ${ECVS_OFFLINE:=${EVCS_OFFLINE}}
# @ECLASS-VARIABLE: ECVS_LOCAL
# @DEFAULT_UNSET
# @DESCRIPTION:
# If this is set, the CVS module will be fetched non-recursively.
# Refer to the information in the CVS man page regarding the -l
# command option (not the -l global option).
# @ECLASS-VARIABLE: ECVS_LOCALNAME
# @DEFAULT_UNSET
# @DESCRIPTION:
# Local name of checkout directory
#
# This is useful if the module on the server is called something
# common like 'driver' or is nested deep in a tree, and you don't like
# useless empty directories.
#
# WARNING: Set this only from within ebuilds! If set in your shell or
# some such, things will break because the ebuild won't expect it and
# have e.g. a wrong $S setting.
# @ECLASS-VARIABLE: ECVS_TOP_DIR
# @DESCRIPTION:
# The directory under which CVS modules are checked out.
: ${ECVS_TOP_DIR:="${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/cvs-src"}
# @ECLASS-VARIABLE: ECVS_SERVER
# @DESCRIPTION:
# CVS path
#
# The format is "server:/dir", e.g. "anoncvs.kde.org:/home/kde".
# Remove the other parts of the full CVSROOT, which might look like
# ":pserver:anonymous@anoncvs.kde.org:/home/kde"; this is generated
# using other settings also.
#
# Set this to "offline" to disable fetching (i.e. to assume the module
# is already checked out in ECVS_TOP_DIR).
: ${ECVS_SERVER:="offline"}
# @ECLASS-VARIABLE: ECVS_MODULE
# @REQUIRED
# @DESCRIPTION:
# The name of the CVS module to be fetched
#
# This must be set when cvs_src_unpack is called. This can include
# several directory levels, e.g. "foo/bar/baz"
#[[ -z ${ECVS_MODULE} ]] && die "$ECLASS: error: ECVS_MODULE not set, cannot continue"
# @ECLASS-VARIABLE: ECVS_DATE
# @DEFAULT_UNSET
# @DESCRIPTION:
# The date of the checkout. See the -D date_spec option in the cvs
# man page for more details.
# @ECLASS-VARIABLE: ECVS_BRANCH
# @DEFAULT_UNSET
# @DESCRIPTION:
# The name of the branch/tag to use
#
# The default is "HEAD". The following default _will_ reset your
# branch checkout to head if used.
#: ${ECVS_BRANCH:="HEAD"}
# @ECLASS-VARIABLE: ECVS_AUTH
# @DESCRIPTION:
# Authentication method to use
#
# Possible values are "pserver" and "ext". If `ext' authentication is
# used, the remote shell to use can be specified in CVS_RSH (SSH is
# used by default). Currently, the only supported remote shell for
# `ext' authentication is SSH.
#
# Armando Di Cianno <fafhrd@gentoo.org> 2004/09/27
# - Added "no" as a server type, which uses no AUTH method, nor
# does it login
# e.g.
# "cvs -danoncvs@savannah.gnu.org:/cvsroot/backbone co System"
# ( from gnustep-apps/textedit )
: ${ECVS_AUTH:="pserver"}
# @ECLASS-VARIABLE: ECVS_USER
# @DESCRIPTION:
# Username to use for authentication on the remote server.
: ${ECVS_USER:="anonymous"}
# @ECLASS-VARIABLE: ECVS_PASS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Password to use for authentication on the remote server
# @ECLASS-VARIABLE: ECVS_SSH_HOST_KEY
# @DEFAULT_UNSET
# @DESCRIPTION:
# If SSH is used for `ext' authentication, use this variable to
# specify the host key of the remote server. The format of the value
# should be the same format that is used for the SSH known hosts file.
#
# WARNING: If a SSH host key is not specified using this variable, the
# remote host key will not be verified.
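#
# Example (an illustrative, truncated key, not a real one):
# @CODE
# ECVS_SSH_HOST_KEY="cvs.example.org ssh-rsa AAAA..."
# @CODE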
# @ECLASS-VARIABLE: ECVS_CLEAN
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set this to get a clean copy when updating (passes the
# -C option to cvs update)
PROPERTIES+=" live"
# add cvs to deps
# ssh is used for ext auth
DEPEND="dev-vcs/cvs"
if [[ ${ECVS_AUTH} == "ext" ]] ; then
#default to ssh
[[ -z ${CVS_RSH} ]] && export CVS_RSH="ssh"
if [[ ${CVS_RSH} != "ssh" ]] ; then
die "Support for ext auth with clients other than ssh has not been implemented yet"
fi
DEPEND+=" net-misc/openssh"
fi
case ${EAPI:-0} in
4|5|6) ;;
7) BDEPEND="${DEPEND}"; DEPEND="" ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} is not supported" ;;
esac
# called from cvs_src_unpack
cvs_fetch() {
# Make these options local variables so that the global values are
# not affected by modifications in this function.
local ECVS_COMMAND=${ECVS_COMMAND}
local ECVS_UP_OPTS=${ECVS_UP_OPTS}
local ECVS_CO_OPTS=${ECVS_CO_OPTS}
debug-print-function ${FUNCNAME} "$@"
# Update variables that are modified by ebuild parameters, which
# should be effective every time cvs_fetch is called, and not just
# every time cvs.eclass is inherited
# Handle parameter for local (non-recursive) fetching
if [[ -n ${ECVS_LOCAL} ]] ; then
ECVS_UP_OPTS+=" -l"
ECVS_CO_OPTS+=" -l"
fi
# Handle ECVS_BRANCH option
#
# Because CVS auto-switches branches, we just have to pass the
# correct -rBRANCH option when updating.
if [[ -n ${ECVS_BRANCH} ]] ; then
ECVS_UP_OPTS+=" -r${ECVS_BRANCH}"
ECVS_CO_OPTS+=" -r${ECVS_BRANCH}"
fi
# Handle ECVS_LOCALNAME, which specifies the local directory name
# to use. Note that the -d command option is not equivalent to
# the global -d option.
if [[ ${ECVS_LOCALNAME} != "${ECVS_MODULE}" ]] ; then
ECVS_CO_OPTS+=" -d ${ECVS_LOCALNAME}"
fi
if [[ -n ${ECVS_CLEAN} ]] ; then
ECVS_UP_OPTS+=" -C"
fi
if [[ -n ${ECVS_DATE} ]] ; then
ECVS_CO_OPTS+=" -D ${ECVS_DATE}"
ECVS_UP_OPTS+=" -D ${ECVS_DATE}"
fi
# Create the top dir if needed
if [[ ! -d ${ECVS_TOP_DIR} ]] ; then
# Note that the addwrite statements in this block are only
# there to allow creating ECVS_TOP_DIR; we allow writing
# inside it separately.
# This is because it's simpler than trying to find out the
# parent path of the directory, which would need to be the
# real path and not a symlink for things to work (so we can't
# just remove the last path element in the string)
debug-print "${FUNCNAME}: checkout mode. creating cvs directory"
addwrite /foobar
addwrite /
mkdir -p "/${ECVS_TOP_DIR}"
export SANDBOX_WRITE="${SANDBOX_WRITE//:\/foobar:\/}"
fi
# In case ECVS_TOP_DIR is a symlink to a dir, get the real path,
# otherwise addwrite() doesn't work.
cd -P "${ECVS_TOP_DIR}" >/dev/null
ECVS_TOP_DIR=$(pwd)
# Disable the sandbox for this dir
addwrite "${ECVS_TOP_DIR}"
# Determine the CVS command mode (checkout or update)
if [[ ! -d ${ECVS_TOP_DIR}/${ECVS_LOCALNAME}/CVS ]] ; then
mode=checkout
else
mode=update
fi
# Our server string (i.e. CVSROOT) without the password so it can
# be put in Root
local connection="${ECVS_AUTH}"
if [[ ${ECVS_AUTH} == "no" ]] ; then
local server="${ECVS_USER}@${ECVS_SERVER}"
else
[[ -n ${ECVS_PROXY} ]] && connection+=";proxy=${ECVS_PROXY}"
[[ -n ${ECVS_PROXY_PORT} ]] && connection+=";proxyport=${ECVS_PROXY_PORT}"
local server=":${connection}:${ECVS_USER}@${ECVS_SERVER}"
fi
# Switch servers automagically if needed
if [[ ${mode} == "update" ]] ; then
cd "/${ECVS_TOP_DIR}/${ECVS_LOCALNAME}"
local oldserver=$(cat CVS/Root)
if [[ ${server} != "${oldserver}" ]] ; then
einfo "Changing the CVS server from ${oldserver} to ${server}:"
debug-print "${FUNCNAME}: Changing the CVS server from ${oldserver} to ${server}:"
einfo "Searching for CVS directories ..."
local cvsdirs=$(find . -iname CVS -print)
debug-print "${FUNCNAME}: CVS directories found:"
debug-print "${cvsdirs}"
einfo "Modifying CVS directories ..."
local x
for x in ${cvsdirs} ; do
debug-print "In ${x}"
echo "${server}" > "${x}/Root"
done
fi
fi
# Prepare a cvspass file just for this session, we don't want to
# mess with ~/.cvspass
touch "${T}/cvspass"
export CVS_PASSFILE="${T}/cvspass"
# The server string with the password in it, for login (only used for pserver)
cvsroot_pass=":${connection}:${ECVS_USER}:${ECVS_PASS}@${ECVS_SERVER}"
# Ditto without the password, for checkout/update after login, so
# that the CVS/Root files don't contain the password in plaintext
if [[ ${ECVS_AUTH} == "no" ]] ; then
cvsroot_nopass="${ECVS_USER}@${ECVS_SERVER}"
else
cvsroot_nopass=":${connection}:${ECVS_USER}@${ECVS_SERVER}"
fi
# Commands to run
cmdlogin=( ${ECVS_CVS_COMMAND} -d "${cvsroot_pass}" login )
cmdupdate=( ${ECVS_CVS_COMMAND} -d "${cvsroot_nopass}" update ${ECVS_UP_OPTS} ${ECVS_LOCALNAME} )
cmdcheckout=( ${ECVS_CVS_COMMAND} -d "${cvsroot_nopass}" checkout ${ECVS_CO_OPTS} ${ECVS_MODULE} )
# Execute commands
cd "${ECVS_TOP_DIR}"
if [[ ${ECVS_AUTH} == "pserver" ]] ; then
einfo "Running ${cmdlogin[*]}"
"${cmdlogin[@]}" || die "cvs login command failed"
if [[ ${mode} == "update" ]] ; then
einfo "Running ${cmdupdate[*]}"
"${cmdupdate[@]}" || die "cvs update command failed"
elif [[ ${mode} == "checkout" ]] ; then
einfo "Running ${cmdcheckout[*]}"
"${cmdcheckout[@]}" || die "cvs checkout command failed"
fi
elif [[ ${ECVS_AUTH} == "ext" || ${ECVS_AUTH} == "no" ]] ; then
# Hack to support SSH password authentication
# Backup environment variable values
local CVS_ECLASS_ORIG_CVS_RSH="${CVS_RSH}"
if [[ ${SSH_ASKPASS+set} == "set" ]] ; then
local CVS_ECLASS_ORIG_SSH_ASKPASS="${SSH_ASKPASS}"
else
unset CVS_ECLASS_ORIG_SSH_ASKPASS
fi
if [[ ${DISPLAY+set} == "set" ]] ; then
local CVS_ECLASS_ORIG_DISPLAY="${DISPLAY}"
else
unset CVS_ECLASS_ORIG_DISPLAY
fi
if [[ ${CVS_RSH} == "ssh" ]] ; then
# Force SSH to use SSH_ASKPASS by creating python wrapper
export CVS_RSH="${T}/cvs_sshwrapper"
cat > "${CVS_RSH}"<<EOF
#!${EPREFIX}/usr/bin/python
import fcntl
import os
import sys
try:
fd = os.open('/dev/tty', 2)
TIOCNOTTY=0x5422
try:
fcntl.ioctl(fd, TIOCNOTTY)
except:
pass
os.close(fd)
except:
pass
newarglist = sys.argv[:]
EOF
# disable X11 forwarding which causes .xauth access violations
# - 20041205 Armando Di Cianno <fafhrd@gentoo.org>
echo "newarglist.insert(1, '-oClearAllForwardings=yes')" \
>> "${CVS_RSH}"
echo "newarglist.insert(1, '-oForwardX11=no')" \
>> "${CVS_RSH}"
# Handle SSH host key checking
local CVS_ECLASS_KNOWN_HOSTS="${T}/cvs_ssh_known_hosts"
echo "newarglist.insert(1, '-oUserKnownHostsFile=${CVS_ECLASS_KNOWN_HOSTS}')" \
>> "${CVS_RSH}"
if [[ -z ${ECVS_SSH_HOST_KEY} ]] ; then
ewarn "Warning: The SSH host key of the remote server will not be verified."
einfo "A temporary known hosts list will be used."
local CVS_ECLASS_STRICT_HOST_CHECKING="no"
touch "${CVS_ECLASS_KNOWN_HOSTS}"
else
local CVS_ECLASS_STRICT_HOST_CHECKING="yes"
echo "${ECVS_SSH_HOST_KEY}" > "${CVS_ECLASS_KNOWN_HOSTS}"
fi
echo -n "newarglist.insert(1, '-oStrictHostKeyChecking=" \
>> "${CVS_RSH}"
echo "${CVS_ECLASS_STRICT_HOST_CHECKING}')" \
>> "${CVS_RSH}"
echo "os.execv('${EPREFIX}/usr/bin/ssh', newarglist)" \
>> "${CVS_RSH}"
chmod a+x "${CVS_RSH}"
# Make sure DISPLAY is set (SSH will not use SSH_ASKPASS
# if DISPLAY is not set)
: ${DISPLAY:="DISPLAY"}
export DISPLAY
# Create a dummy executable to echo ${ECVS_PASS}
export SSH_ASKPASS="${T}/cvs_sshechopass"
if [[ ${ECVS_AUTH} != "no" ]] ; then
echo -en "#!/bin/bash\necho \"${ECVS_PASS}\"\n" \
> "${SSH_ASKPASS}"
else
echo -en "#!/bin/bash\nreturn\n" \
> "${SSH_ASKPASS}"
fi
chmod a+x "${SSH_ASKPASS}"
fi
if [[ ${mode} == "update" ]] ; then
einfo "Running ${cmdupdate[*]}"
"${cmdupdate[@]}" || die "cvs update command failed"
elif [[ ${mode} == "checkout" ]] ; then
einfo "Running ${cmdcheckout[*]}"
"${cmdcheckout[@]}" || die "cvs checkout command failed"
fi
# Restore environment variable values
export CVS_RSH="${CVS_ECLASS_ORIG_CVS_RSH}"
if [[ ${CVS_ECLASS_ORIG_SSH_ASKPASS+set} == "set" ]] ; then
export SSH_ASKPASS="${CVS_ECLASS_ORIG_SSH_ASKPASS}"
else
unset SSH_ASKPASS
fi
if [[ ${CVS_ECLASS_ORIG_DISPLAY+set} == "set" ]] ; then
export DISPLAY="${CVS_ECLASS_ORIG_DISPLAY}"
else
unset DISPLAY
fi
fi
}
# @FUNCTION: cvs_src_unpack
# @DESCRIPTION:
# The cvs src_unpack function, which will be exported
cvs_src_unpack() {
debug-print-function ${FUNCNAME} "$@"
debug-print "${FUNCNAME}: init:
ECVS_CVS_COMMAND=${ECVS_CVS_COMMAND}
ECVS_UP_OPTS=${ECVS_UP_OPTS}
ECVS_CO_OPTS=${ECVS_CO_OPTS}
ECVS_TOP_DIR=${ECVS_TOP_DIR}
ECVS_SERVER=${ECVS_SERVER}
ECVS_USER=${ECVS_USER}
ECVS_PASS=${ECVS_PASS}
ECVS_MODULE=${ECVS_MODULE}
ECVS_LOCAL=${ECVS_LOCAL}
ECVS_LOCALNAME=${ECVS_LOCALNAME}"
[[ -z ${ECVS_MODULE} ]] && die "ERROR: CVS module not set, cannot continue."
local ECVS_LOCALNAME=${ECVS_LOCALNAME:-${ECVS_MODULE}}
local sanitized_pn=$(echo "${PN}" | LC_ALL=C sed -e 's:[^A-Za-z0-9_]:_:g')
local offline_pkg_var="ECVS_OFFLINE_${sanitized_pn}"
if [[ -n ${!offline_pkg_var}${ECVS_OFFLINE} ]] || [[ ${ECVS_SERVER} == "offline" ]] ; then
# We're not required to fetch anything; the module already
# exists and shouldn't be updated.
if [[ -d ${ECVS_TOP_DIR}/${ECVS_LOCALNAME} ]] ; then
debug-print "${FUNCNAME}: offline mode"
else
debug-print "${FUNCNAME}: Offline mode specified but directory ${ECVS_TOP_DIR}/${ECVS_LOCALNAME} not found, exiting with error"
die "ERROR: Offline mode specified, but directory ${ECVS_TOP_DIR}/${ECVS_LOCALNAME} not found. Aborting."
fi
elif [[ -n ${ECVS_SERVER} ]] ; then # ECVS_SERVER!=offline --> real fetching mode
einfo "Fetching CVS module ${ECVS_MODULE} into ${ECVS_TOP_DIR} ..."
cvs_fetch
else # ECVS_SERVER not set
die "ERROR: CVS server not specified, cannot continue."
fi
einfo "Copying ${ECVS_MODULE} from ${ECVS_TOP_DIR} ..."
debug-print "Copying module ${ECVS_MODULE} local_mode=${ECVS_LOCAL} from ${ECVS_TOP_DIR} ..."
# This is probably redundant, but best to make sure.
mkdir -p "${WORKDIR}/${ECVS_LOCALNAME}"
if [[ -n ${ECVS_LOCAL} ]] ; then
cp -f "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}"/* "${WORKDIR}/${ECVS_LOCALNAME}"
else
cp -Rf "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}" "${WORKDIR}/${ECVS_LOCALNAME}/.."
fi
# Not exactly perfect, but should be pretty close #333773
export ECVS_VERSION=$(
find "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}/" -ipath '*/CVS/Entries' -exec cat {} + | \
LC_ALL=C sort | \
sha1sum | \
awk '{print $1}'
)
# If the directory is empty, remove it; empty directories cannot
# exist in cvs. This happens when, for example, kde-source
# requests module/doc/subdir which doesn't exist. Still create
# the empty directory in workdir though.
if [[ $(ls -A "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}") == "CVS" ]] ; then
debug-print "${FUNCNAME}: removing empty CVS directory ${ECVS_LOCALNAME}"
rm -rf "${ECVS_TOP_DIR}/${ECVS_LOCALNAME}"
fi
einfo "CVS module ${ECVS_MODULE} is now in ${WORKDIR}"
}
EXPORT_FUNCTIONS src_unpack
fi

220
eclass/darcs.eclass Normal file
View File

@ -0,0 +1,220 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: darcs.eclass
# @MAINTAINER:
# "Gentoo's Haskell Language team" <haskell@gentoo.org>
# Sergei Trofimovich <slyfox@gentoo.org>
# @AUTHOR:
# Original Author: Jeffrey Yasskin <jyasskin@mail.utexas.edu>
# <rphillips@gentoo.org> (tla eclass author)
# Andres Loeh <kosmikus@gentoo.org> (darcs.eclass author)
# Alexander Vershilov <alexander.vershilov@gmail.com> (various contributions)
# @BLURB: This eclass provides functions to fetch and unpack darcs repositories
# @DEPRECATED: none
# @DESCRIPTION:
# This eclass provides the generic darcs fetching functions.
#
# Define the EDARCS_REPOSITORY variable at least.
# The ${S} variable is set to ${WORKDIR}/${P}.
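#
# A minimal usage sketch (the repository URL is illustrative):
# @CODE
# EDARCS_REPOSITORY="http://darcs.example.org/foo"
# inherit darcs
# @CODE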
# TODO:
# support for tags
# eshopts_{push,pop}
case "${EAPI:-0}" in
4|5|6) inherit eutils ;;
7) inherit estack ;;
*) ;;
esac
# Don't download anything other than the darcs repository
SRC_URI=""
# You shouldn't change these settings yourself! The ebuild/eclass inheriting
# this eclass will take care of that.
# --- begin ebuild-configurable settings
# darcs command to run
# @ECLASS-VARIABLE: EDARCS_DARCS_CMD
# @DESCRIPTION:
# Path to darcs binary.
: ${EDARCS_DARCS_CMD:=darcs}
# darcs commands with command-specific options
# @ECLASS-VARIABLE: EDARCS_GET_CMD
# @DESCRIPTION:
# First fetch darcs command.
: ${EDARCS_GET_CMD:=get --lazy}
# @ECLASS-VARIABLE: EDARCS_UPDATE_CMD
# @DESCRIPTION:
# Repo update darcs command.
: ${EDARCS_UPDATE_CMD:=pull}
# @ECLASS-VARIABLE: EDARCS_OPTIONS
# @DESCRIPTION:
# Options to pass to both the "get" and "update" commands
: ${EDARCS_OPTIONS:=--set-scripts-executable}
# @ECLASS-VARIABLE: EDARCS_TOP_DIR
# @DESCRIPTION:
# Where the darcs repositories are stored/accessed
: ${EDARCS_TOP_DIR:=${PORTAGE_ACTUAL_DISTDIR-${DISTDIR}}/darcs-src}
# @ECLASS-VARIABLE: EDARCS_REPOSITORY
# @DESCRIPTION:
# The URI to the repository.
: ${EDARCS_REPOSITORY:=}
# @ECLASS-VARIABLE: EDARCS_OFFLINE
# @DESCRIPTION:
# Set this variable to a non-empty value to disable the automatic updating of
# a darcs repository. This is intended to be set outside the darcs source
# tree by users. Defaults to EVCS_OFFLINE value.
: ${EDARCS_OFFLINE:=${EVCS_OFFLINE}}
# @ECLASS-VARIABLE: EDARCS_CLEAN
# @DESCRIPTION:
# Set this to something to get a clean copy when updating
# (removes the working directory, then uses EDARCS_GET_CMD to
# re-download it.)
: ${EDARCS_CLEAN:=}
# --- end ebuild-configurable settings ---
PROPERTIES+=" live"
case ${EAPI:-0} in
[0-6]) # no need to care about 5-HDEPEND and similar
DEPEND="dev-vcs/darcs
net-misc/rsync"
;;
*)
BDEPEND="dev-vcs/darcs
net-misc/rsync"
;;
esac
# @FUNCTION: darcs_patchcount
# @DESCRIPTION:
# Internal function to determine the number of patches in the repository.
darcs_patchcount() {
set -- $(HOME="${EDARCS_TOP_DIR}" ${EDARCS_DARCS_CMD} show repo --repodir="${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}" | grep "Num Patches")
# handle string like: " Num Patches: 3860"
echo ${3}
}
# @FUNCTION: darcs_fetch
# @DESCRIPTION:
# Internal function, called from darcs_src_unpack.
darcs_fetch() {
# The local directory to store the repository (useful to ensure a
# unique local name); relative to EDARCS_TOP_DIR
[[ -z ${EDARCS_LOCALREPO} ]] && [[ -n ${EDARCS_REPOSITORY} ]] \
&& EDARCS_LOCALREPO=${EDARCS_REPOSITORY%/} \
&& EDARCS_LOCALREPO=${EDARCS_LOCALREPO##*/}
debug-print-function ${FUNCNAME} $*
if [[ -n ${EDARCS_CLEAN} ]]; then
addwrite "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}"
rm -rf "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}"
fi
# create the top dir if needed
if [[ ! -d ${EDARCS_TOP_DIR} ]]; then
# note that the addwrite statements in this block are only there to allow creating EDARCS_TOP_DIR;
# we've already allowed writing inside it
# this is because it's simpler than trying to find out the parent path of the directory, which
# would need to be the real path and not a symlink for things to work (so we can't just remove
# the last path element in the string)
debug-print "${FUNCNAME}: checkout mode. creating darcs directory"
addwrite /foobar
addwrite /
mkdir -p "${EDARCS_TOP_DIR}"
export SANDBOX_WRITE="${SANDBOX_WRITE//:\/foobar:\/}"
fi
# in case EDARCS_DARCS_DIR is a symlink to a dir, get the real
# dir's path, otherwise addwrite() doesn't work.
pushd . || die
cd -P "${EDARCS_TOP_DIR}" > /dev/null
EDARCS_TOP_DIR="`/bin/pwd`"
# disable the sandbox for this dir
addwrite "${EDARCS_TOP_DIR}"
# determine checkout or update mode and change to the right directory.
if [[ ! -d "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}/_darcs" ]]; then
mode=get
cd "${EDARCS_TOP_DIR}"
else
mode=update
cd "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}"
fi
# commands to run
local cmdget="${EDARCS_DARCS_CMD} ${EDARCS_GET_CMD} ${EDARCS_OPTIONS} --repo-name=${EDARCS_LOCALREPO} ${EDARCS_REPOSITORY}"
local cmdupdate="${EDARCS_DARCS_CMD} ${EDARCS_UPDATE_CMD} --all ${EDARCS_OPTIONS} ${EDARCS_REPOSITORY}"
if [[ ${mode} == "get" ]]; then
einfo "Running ${cmdget}"
HOME="${EDARCS_TOP_DIR}" ${cmdget} || die "darcs get command failed"
elif [[ -n ${EDARCS_OFFLINE} ]] ; then
einfo "Offline update"
elif [[ ${mode} == "update" ]]; then
einfo "Running ${cmdupdate}"
HOME="${EDARCS_TOP_DIR}" ${cmdupdate} || die "darcs update command failed"
fi
export EDARCS_PATCHCOUNT=$(darcs_patchcount)
einfo " patches in repo: ${EDARCS_PATCHCOUNT}"
popd || die
}
# @FUNCTION: darcs_src_unpack
# @DESCRIPTION:
# The darcs src_unpack function, which will be exported.
darcs_src_unpack() {
# The local directory to store the repository (useful to ensure a
# unique local name); relative to EDARCS_TOP_DIR
[[ -z ${EDARCS_LOCALREPO} ]] && [[ -n ${EDARCS_REPOSITORY} ]] \
&& EDARCS_LOCALREPO=${EDARCS_REPOSITORY%/} \
&& EDARCS_LOCALREPO=${EDARCS_LOCALREPO##*/}
debug-print-function ${FUNCNAME} $*
debug-print "${FUNCNAME}: init:
EDARCS_DARCS_CMD=${EDARCS_DARCS_CMD}
EDARCS_GET_CMD=${EDARCS_GET_CMD}
EDARCS_UPDATE_CMD=${EDARCS_UPDATE_CMD}
EDARCS_OPTIONS=${EDARCS_OPTIONS}
EDARCS_TOP_DIR=${EDARCS_TOP_DIR}
EDARCS_REPOSITORY=${EDARCS_REPOSITORY}
EDARCS_LOCALREPO=${EDARCS_LOCALREPO}
EDARCS_CLEAN=${EDARCS_CLEAN}"
einfo "Fetching darcs repository ${EDARCS_REPOSITORY} into ${EDARCS_TOP_DIR}..."
darcs_fetch
einfo "Copying ${EDARCS_LOCALREPO} from ${EDARCS_TOP_DIR}..."
debug-print "Copying ${EDARCS_LOCALREPO} from ${EDARCS_TOP_DIR}..."
# probably redundant, but best to make sure
# Use ${WORKDIR}/${P} rather than ${S} so user can point ${S} to something inside.
mkdir -p "${WORKDIR}/${P}"
eshopts_push -s dotglob # get any dotfiles too.
rsync -rlpgo "${EDARCS_TOP_DIR}/${EDARCS_LOCALREPO}"/* "${WORKDIR}/${P}"
eshopts_pop
einfo "Darcs repository contents are now in ${WORKDIR}/${P}"
}
EXPORT_FUNCTIONS src_unpack

119
eclass/db-use.eclass Normal file
View File

@ -0,0 +1,119 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# This is a common location for functions that aid the use of sys-libs/db
#
# Bugs: maintainer-needed@gentoo.org
# multilib is used for get_libname in all EAPI
case "${EAPI:-0}" in
0|1|2|3|4|5|6) inherit eapi7-ver multilib ;;
*) inherit multilib ;;
esac
# Convert a version to a db slot.
db_ver_to_slot() {
if [ $# -ne 1 ]; then
eerror "Function db_ver_to_slot needs one argument" >&2
eerror "args given:" >&2
for f in "$@"
do
eerror " - \"${f}\"" >&2
done
return 1
fi
# 5.0.x uses 5.0 as slot value, so this replacement will break it;
# older sys-libs/db might have been using this but it's no longer
# the case, so make it work for latest rather than older stuff.
# echo -n "${1/.0/}"
echo -n "$1"
}
# Find the version that corresponds to the given atom.
db_findver() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
if [ $# -ne 1 ]; then
eerror "Function db_findver needs one argument" >&2
eerror "args given:" >&2
for f in "$@"
do
eerror " - \"${f}\"" >&2
done
return 1
fi
PKG="$(best_version $1)"
VER="$(ver_cut 1-2 "${PKG/*db-/}")"
if [ -d "${EPREFIX}"/usr/include/db$(db_ver_to_slot "$VER") ]; then
#einfo "Found db version ${VER}" >&2
echo -n "$VER"
return 0
else
return 1
fi
}
# Get the include dir for berkeley db.
# This function has two modes. Without any arguments it will give the best
# version available. With arguments that form the versions of db packages
# to test for, it will aim to find the include directory corresponding to it.
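#
# Example (illustrative; assumes flag-o-matic is inherited for append-cppflags):
# @CODE
# # use the best installed version
# append-cppflags "-I$(db_includedir)"
# # or restrict the search to specific versions
# append-cppflags "-I$(db_includedir 5.3 4.8)"
# @CODE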
db_includedir() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
if [ $# -eq 0 ]; then
VER="$(db_findver sys-libs/db)" || return 1
VER="$(db_ver_to_slot "$VER")"
echo "include version ${VER}" >&2
if [ -d "${EPREFIX}/usr/include/db${VER}" ]; then
echo -n "${EPREFIX}/usr/include/db${VER}"
return 0
else
eerror "sys-libs/db package requested, but headers not found" >&2
return 1
fi
else
# arguments given
for x in $@
do
if VER=$(db_findver "=sys-libs/db-${x}*") &&
[ -d "${EPREFIX}/usr/include/db$(db_ver_to_slot $VER)" ]; then
echo -n "${EPREFIX}/usr/include/db$(db_ver_to_slot $VER)"
return 0
fi
done
eerror "No suitable db version found"
return 1
fi
}
# Get the library name for berkeley db. Something like "db-4.2" will be the
# outcome. This function has two modes. Without any arguments it will give
# the best version available. With arguments that form the versions of db
# packages to test for, it will aim to find the library corresponding to it.
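#
# Example (the configure option is illustrative, not from a real package):
# @CODE
# econf --with-db-name="$(db_libname 5.3 4.8)"
# @CODE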
db_libname() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
if [ $# -eq 0 ]; then
VER="$(db_findver sys-libs/db)" || return 1
if [ -e "${EPREFIX}/usr/$(get_libdir)/libdb-${VER}$(get_libname)" ]; then
echo -n "db-${VER}"
return 0
else
eerror "sys-libs/db package requested, but library not found" >&2
return 1
fi
else
# arguments given
for x in $@
do
if VER=$(db_findver "=sys-libs/db-${x}*"); then
if [ -e "${EPREFIX}/usr/$(get_libdir)/libdb-${VER}$(get_libname)" ]; then
echo -n "db-${VER}"
return 0
fi
fi
done
eerror "No suitable db version found" >&2
return 1
fi
}

203
eclass/db.eclass Normal file
View File

@ -0,0 +1,203 @@
# Copyright 1999-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: db.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @BLURB: Internal eclass used by sys-libs/db ebuilds
inherit eutils multilib multiprocessing
IUSE="doc test examples"
EXPORT_FUNCTIONS src_test
DEPEND="test? ( >=dev-lang/tcl-8.4 )"
RDEPEND=""
db_fix_so() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && EROOT="${ROOT}"
LIB="${EROOT}/usr/$(get_libdir)"
cd "${LIB}" || die
# first clean up old symlinks
local soext=$(get_libname)
find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*'"${soext#.}" -delete || die
soext=$(get_libname "[23]")
find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*'"${soext#.}" -delete || die
find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*a' -delete || die
# now rebuild all the correct ones
local ext
for ext in so dylib a; do
for name in libdb libdb_{cxx,tcl,java,sql,stl}; do
target="$(find . -maxdepth 1 -type f -name "${name}-*.${ext}" |sort -V |tail -n 1)"
[[ -n "${target}" ]] && ln -sf ${target//.\//} ${name}.${ext}
done;
done;
# db[23] gets some extra-special stuff
if [[ -f libdb1$(get_libname 2) ]]; then
ln -sf libdb1$(get_libname 2) libdb$(get_libname 2)
ln -sf libdb1$(get_libname 2) libdb1$(get_libname)
ln -sf libdb1$(get_libname 2) libdb-1$(get_libname)
fi
# what do we do if we ever get 3.3 ?
local i
for i in libdb libdb_{cxx,tcl,java,sql,stl}; do
if [[ -f $i-3.2$(get_libname) ]]; then
ln -sf $i-3.2$(get_libname) $i-3$(get_libname)
ln -sf $i-3.2$(get_libname) $i$(get_libname 3)
fi
done
# do the same for headers now
# but since there are only two of them, just overwrite them
cd "${EROOT}"/usr/include
target="$(find . -maxdepth 1 -type d -name 'db[0-9]*' | sort -V |cut -d/ -f2- | tail -n1)"
if [[ -n "${target}" ]] && [[ -e "${target}/db.h" ]] && ( ! [[ -e db.h ]] || [[ -h db.h ]] ); then
einfo "Creating db.h symlinks to ${target}"
ln -sf "${target}"/db.h .
ln -sf "${target}"/db_185.h .
elif [[ ! -e "${target}/db.h" ]]; then
if [[ -n "${target}" ]]; then
ewarn "Could not find ${target}/db.h"
elif [[ -h db.h ]]; then
einfo "Apparently you just removed the last instance of $PN. Removing the symlinks"
rm -f db.h db_185.h
fi
fi
}
db_src_install_doc() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
# not everybody wants this wad of documentation as it is primarily API docs
if use doc; then
dodir /usr/share/doc/${PF}/html
mv "${ED}"/usr/docs/* "${ED}"/usr/share/doc/${PF}/html/ || die
rm -rf "${ED}"/usr/docs
else
rm -rf "${ED}"/usr/docs
fi
db_src_install_examples
}
db_src_install_examples() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
if use examples ; then
local langs=( c cxx stl )
[[ "${IUSE/java}" != "${IUSE}" ]] \
&& use java \
&& langs+=( java )
local i
for i in ${langs[@]} ; do
destdir="/usr/share/doc/${PF}/"
src="${S}/../examples_${i}/"
if [[ -f "${src}" ]]; then
dodir "${destdir}"
cp -ra "${src}" "${ED}${destdir}/" || die
fi
done
fi
}
db_src_install_usrbinslot() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
# slot all program names to avoid overwriting
local fname
for fname in "${ED}"/usr/bin/db*
do
dn="$(dirname "${fname}")"
bn="$(basename "${fname}")"
bn="${bn/db/db${SLOT}}"
mv "${fname}" "${dn}/${bn}" || \
die "Failed to rename ${fname} to ${dn}/${bn}"
done
}
db_src_install_headerslot() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
# install all headers in a slotted location
dodir /usr/include/db${SLOT}
mv "${ED}"/usr/include/*.h "${ED}"/usr/include/db${SLOT}/ || die
}
db_src_install_usrlibcleanup() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && ED="${D}"
LIB="${ED}/usr/$(get_libdir)"
# Clean out the symlinks so that they will not be recorded in the
# contents (bug #60732)
if [[ "${ED}" = "" ]]; then
die "Calling clean_links while \${ED} not defined"
fi
if [[ -e "${LIB}"/libdb.a ]] && [[ ! -e "${LIB}"/libdb-${SLOT}.a ]]; then
einfo "Moving libdb.a to a versioned name"
mv "${LIB}/libdb.a" "${LIB}/libdb-${SLOT}.a" || die
fi
if [[ -e "${LIB}"/libdb_cxx.a ]] && [[ ! -e "${LIB}"/libdb_cxx-${SLOT}.a ]]; then
einfo "Moving libdb_cxx.a to a versioned name"
mv "${LIB}/libdb_cxx.a" "${LIB}/libdb_cxx-${SLOT}.a" || die
fi
local soext=$(get_libname)
find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*'"${soext#.}" -delete || die
soext=$(get_libname "[23]")
find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*'"${soext#.}" -delete || die
einfo "removing unversioned static archives"
find "${LIB}" -maxdepth 1 -type l -name 'libdb[1._-]*a' -delete || die
rm -f \
"${ED}"/usr/include/{db,db_185}.h \
"${LIB}"/libdb{,_{cxx,sql,stl,java,tcl}}.a
}
db_src_test() {
if [[ $UID -eq 0 ]]; then
M="You must run the testsuite as non-root, skipping"
ewarn "${M}"
elog "${M}"
return 0
fi
if use tcl; then
einfo "Running sys-libs/db testsuite"
ewarn "This can take 6+ hours on modern machines"
# Fix stuff that fails with relative paths, and upstream moving files
# around...
local test_parallel='' t
for t in \
"${S}"/test/parallel.tcl \
"${S}"/../test/parallel.tcl \
"${S}"/test/tcl/parallel.tcl \
"${S}"/../test/tcl/parallel.tcl \
; do
[[ -f "${t}" ]] && test_parallel="${t}" && break
done
sed -ri \
-e '/regsub .test_path ./s,(regsub),#\1,g' \
-e '/regsub .src_root ./s,(regsub),#\1,g' \
-e '/regsub .tcl_utils ./s,(regsub),#\1,g' \
"${test_parallel}"
cd "${S}"
for t in \
../test/test.tcl \
../test/tcl/test.tcl \
; do
[[ -f "${t}" ]] && testbase="${t}" && break
done
echo "source ${t}" > testrunner.tcl
echo "run_parallel $(makeopts_jobs) run_std" >> testrunner.tcl
tclsh testrunner.tcl
grep -Eqs '^FAIL' ALL.OUT* && die "Some tests failed, please see ${S}/ALL.OUT*"
else
eerror "You must have USE=tcl to run the sys-libs/db testsuite."
fi
}

381
eclass/depend.apache.eclass Normal file
View File

@ -0,0 +1,381 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: depend.apache.eclass
# @MAINTAINER:
# apache-devs@gentoo.org
# @SUPPORTED_EAPIS: 0 2 3 4 5 6 7
# @BLURB: Functions to allow ebuilds to depend on apache
# @DESCRIPTION:
# This eclass handles depending on apache in a sane way and provides information
# about where certain binaries and configuration files are located.
#
# To make use of this eclass simply call one of the need/want_apache functions
# described below. Make sure you use the need/want_apache call after you have
# defined DEPEND and RDEPEND. Also note that you can not rely on the automatic
# RDEPEND=DEPEND that portage does if you use this eclass.
#
# See Bug 107127 for more information.
#
# @EXAMPLE:
#
# Here is an example of an ebuild depending on apache:
#
# @CODE
# DEPEND="virtual/Perl-CGI"
# RDEPEND="${DEPEND}"
# need_apache2
# @CODE
#
# Another example which demonstrates non-standard IUSE options for optional
# apache support:
#
# @CODE
# DEPEND="server? ( virtual/Perl-CGI )"
# RDEPEND="${DEPEND}"
# want_apache2 server
#
# pkg_setup() {
# depend.apache_pkg_setup server
# }
# @CODE
case ${EAPI:-0} in
0|2|3|4|5)
inherit multilib
;;
6|7)
;;
*)
die "EAPI=${EAPI} is not supported by depend.apache.eclass"
;;
esac
# ==============================================================================
# INTERNAL VARIABLES
# ==============================================================================
# @ECLASS-VARIABLE: APACHE_VERSION
# @DESCRIPTION:
# Stores the version of apache we are going to be ebuilding.
# This variable is set by the want/need_apache functions.
# @ECLASS-VARIABLE: APXS
# @DESCRIPTION:
# Path to the apxs tool.
# This variable is set by the want/need_apache functions.
# @ECLASS-VARIABLE: APACHE_BIN
# @DESCRIPTION:
# Path to the apache binary.
# This variable is set by the want/need_apache functions.
# @ECLASS-VARIABLE: APACHE_CTL
# @DESCRIPTION:
# Path to the apachectl tool.
# This variable is set by the want/need_apache functions.
# @ECLASS-VARIABLE: APACHE_BASEDIR
# @DESCRIPTION:
# Path to the server root directory.
# This variable is set by the want/need_apache functions (EAPI=0 through 5)
# or depend.apache_pkg_setup (EAPI=6 and later).
# @ECLASS-VARIABLE: APACHE_CONFDIR
# @DESCRIPTION:
# Path to the configuration file directory.
# This variable is set by the want/need_apache functions.
# @ECLASS-VARIABLE: APACHE_MODULES_CONFDIR
# @DESCRIPTION:
# Path where module configuration files are kept.
# This variable is set by the want/need_apache functions.
# @ECLASS-VARIABLE: APACHE_VHOSTS_CONFDIR
# @DESCRIPTION:
# Path where virtual host configuration files are kept.
# This variable is set by the want/need_apache functions.
# @ECLASS-VARIABLE: APACHE_MODULESDIR
# @DESCRIPTION:
# Path where we install modules.
# This variable is set by the want/need_apache functions (EAPI=0 through 5)
# or depend.apache_pkg_setup (EAPI=6 and later).
# @ECLASS-VARIABLE: APACHE_DEPEND
# @DESCRIPTION:
# Dependencies for Apache
APACHE_DEPEND="www-servers/apache"
# @ECLASS-VARIABLE: APACHE2_DEPEND
# @DESCRIPTION:
# Dependencies for Apache 2.x
APACHE2_DEPEND="=www-servers/apache-2*"
# @ECLASS-VARIABLE: APACHE2_2_DEPEND
# @DESCRIPTION:
# Dependencies for Apache 2.2.x
APACHE2_2_DEPEND="=www-servers/apache-2.2*"
# @ECLASS-VARIABLE: APACHE2_4_DEPEND
# @DESCRIPTION:
# Dependencies for Apache 2.4.x
APACHE2_4_DEPEND="=www-servers/apache-2.4*"
# ==============================================================================
# INTERNAL FUNCTIONS
# ==============================================================================
_init_apache2() {
debug-print-function $FUNCNAME $*
# WARNING: Do not use these variables with anything that is put
# into the dependency cache (DEPEND/RDEPEND/etc)
APACHE_VERSION="2"
APXS="/usr/bin/apxs"
APACHE_BIN="/usr/sbin/apache2"
APACHE_CTL="/usr/sbin/apache2ctl"
APACHE_INCLUDEDIR="/usr/include/apache2"
APACHE_CONFDIR="/etc/apache2"
APACHE_MODULES_CONFDIR="${APACHE_CONFDIR}/modules.d"
APACHE_VHOSTS_CONFDIR="${APACHE_CONFDIR}/vhosts.d"
case ${EAPI:-0} in
0|2|3|4|5)
_init_apache2_late
;;
esac
}
_init_apache2_late() {
APACHE_BASEDIR="/usr/$(get_libdir)/apache2"
APACHE_MODULESDIR="${APACHE_BASEDIR}/modules"
}
_init_no_apache() {
debug-print-function $FUNCNAME $*
APACHE_VERSION="0"
}
# ==============================================================================
# PUBLIC FUNCTIONS
# ==============================================================================
# @FUNCTION: depend.apache_pkg_setup
# @USAGE: [myiuse]
# @DESCRIPTION:
# An ebuild calls this in pkg_setup() to initialize variables for optional
# apache-2.x support. If the myiuse parameter is not given it defaults to
# apache2.
depend.apache_pkg_setup() {
debug-print-function $FUNCNAME $*
if [[ "${EBUILD_PHASE}" != "setup" ]]; then
die "$FUNCNAME() should be called in pkg_setup()"
fi
local myiuse=${1:-apache2}
case ${EAPI:-0} in
0|2|3|4|5)
if has ${myiuse} ${IUSE}; then
if use ${myiuse}; then
_init_apache2
else
_init_no_apache
fi
fi
;;
*)
if in_iuse ${myiuse}; then
if use ${myiuse}; then
_init_apache2
_init_apache2_late
else
_init_no_apache
fi
fi
;;
esac
}
# @FUNCTION: want_apache
# @USAGE: [myiuse]
# @DESCRIPTION:
# An ebuild calls this to get the dependency information for optional apache
# support. If the myiuse parameter is not given it defaults to apache2.
# An ebuild should additionally call depend.apache_pkg_setup() in pkg_setup()
# with the same myiuse parameter.
want_apache() {
debug-print-function $FUNCNAME $*
want_apache2 "$@"
}
# @FUNCTION: want_apache2
# @USAGE: [myiuse]
# @DESCRIPTION:
# An ebuild calls this to get the dependency information for optional apache-2.x
# support. If the myiuse parameter is not given it defaults to apache2.
# An ebuild should additionally call depend.apache_pkg_setup() in pkg_setup()
# with the same myiuse parameter.
want_apache2() {
debug-print-function $FUNCNAME $*
local myiuse=${1:-apache2}
IUSE="${IUSE} ${myiuse}"
DEPEND="${DEPEND} ${myiuse}? ( ${APACHE2_DEPEND} )"
RDEPEND="${RDEPEND} ${myiuse}? ( ${APACHE2_DEPEND} )"
}
# @FUNCTION: want_apache2_2
# @USAGE: [myiuse]
# @DESCRIPTION:
# An ebuild calls this to get the dependency information for optional
# apache-2.2.x support. If the myiuse parameter is not given it defaults to
# apache2.
# An ebuild should additionally call depend.apache_pkg_setup() in pkg_setup()
# with the same myiuse parameter.
want_apache2_2() {
debug-print-function $FUNCNAME $*
local myiuse=${1:-apache2}
IUSE="${IUSE} ${myiuse}"
DEPEND="${DEPEND} ${myiuse}? ( ${APACHE2_2_DEPEND} )"
RDEPEND="${RDEPEND} ${myiuse}? ( ${APACHE2_2_DEPEND} )"
}
# @FUNCTION: want_apache2_4
# @USAGE: [myiuse]
# @DESCRIPTION:
# An ebuild calls this to get the dependency information for optional
# apache-2.4.x support. If the myiuse parameter is not given it defaults to
# apache2.
# An ebuild should additionally call depend.apache_pkg_setup() in pkg_setup()
# with the same myiuse parameter.
want_apache2_4() {
debug-print-function $FUNCNAME $*
local myiuse=${1:-apache2}
IUSE="${IUSE} ${myiuse}"
DEPEND="${DEPEND} ${myiuse}? ( ${APACHE2_4_DEPEND} )"
RDEPEND="${RDEPEND} ${myiuse}? ( ${APACHE2_4_DEPEND} )"
}
# @FUNCTION: need_apache
# @DESCRIPTION:
# An ebuild calls this to get the dependency information for apache.
need_apache() {
debug-print-function $FUNCNAME $*
need_apache2
}
# @FUNCTION: need_apache2
# @DESCRIPTION:
# An ebuild calls this to get the dependency information for apache-2.x.
need_apache2() {
debug-print-function $FUNCNAME $*
DEPEND="${DEPEND} ${APACHE2_DEPEND}"
RDEPEND="${RDEPEND} ${APACHE2_DEPEND}"
_init_apache2
}
# @FUNCTION: need_apache2_2
# @DESCRIPTION:
# An ebuild calls this to get the dependency information for apache-2.2.x.
need_apache2_2() {
debug-print-function $FUNCNAME $*
DEPEND="${DEPEND} ${APACHE2_2_DEPEND}"
RDEPEND="${RDEPEND} ${APACHE2_2_DEPEND}"
_init_apache2
}
# @FUNCTION: need_apache2_4
# @DESCRIPTION:
# An ebuild calls this to get the dependency information for apache-2.4.x.
need_apache2_4() {
debug-print-function $FUNCNAME $*
DEPEND="${DEPEND} ${APACHE2_4_DEPEND}"
RDEPEND="${RDEPEND} ${APACHE2_4_DEPEND}"
_init_apache2
}
# @FUNCTION: has_apache
# @DESCRIPTION:
# An ebuild calls this to get runtime variables for an indirect apache
# dependency without USE-flag, in which case want_apache does not work.
# DO NOT call this function in global scope.
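#
# Example (a hedged sketch; how the result is used varies per package):
# @CODE
# pkg_setup() {
# has_apache
# if [[ ${APACHE_VERSION} != 0 ]]; then
# einfo "Found apache-${APACHE_VERSION}.x, enabling the apache module"
# fi
# }
# @CODE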
has_apache() {
debug-print-function $FUNCNAME $*
if has_version '>=www-servers/apache-2'; then
_init_apache2
else
_init_no_apache
fi
}
# @FUNCTION: has_apache_threads
# @USAGE: [myflag]
# @DESCRIPTION:
# An ebuild calls this to make sure thread-safety is enabled if apache has been
# built with a threaded MPM. If the myflag parameter is not given it defaults to
# threads.
has_apache_threads() {
debug-print-function $FUNCNAME $*
case ${EAPI:-0} in
0|1)
die "depend.apache.eclass: has_apache_threads is not supported for EAPI=${EAPI:-0}"
;;
esac
if ! has_version 'www-servers/apache[threads]'; then
return
fi
local myflag="${1:-threads}"
if ! use ${myflag}; then
echo
eerror "You need to enable USE flag '${myflag}' to build a thread-safe version"
eerror "of ${CATEGORY}/${PN} for use with www-servers/apache"
die "Need missing USE flag '${myflag}'"
fi
}
# @FUNCTION: has_apache_threads_in
# @USAGE: <myforeign> [myflag]
# @DESCRIPTION:
# An ebuild calls this to make sure thread-safety is enabled in a foreign
# package if apache has been built with a threaded MPM. If the myflag parameter
# is not given it defaults to threads.
has_apache_threads_in() {
debug-print-function $FUNCNAME $*
case ${EAPI:-0} in
0|1)
die "depend.apache.eclass: has_apache_threads_in is not supported for EAPI=${EAPI:-0}"
;;
esac
if ! has_version 'www-servers/apache[threads]'; then
return
fi
local myforeign="$1"
local myflag="${2:-threads}"
if ! has_version "${myforeign}[${myflag}]"; then
echo
eerror "You need to enable USE flag '${myflag}' in ${myforeign} to"
eerror "build a thread-safe version of ${CATEGORY}/${PN} for use"
eerror "with www-servers/apache"
die "Need missing USE flag '${myflag}' in ${myforeign}"
fi
}
EXPORT_FUNCTIONS pkg_setup

398
eclass/desktop.eclass Normal file
View File

@ -0,0 +1,398 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: desktop.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @BLURB: support for desktop files, menus, and icons
if [[ -z ${_DESKTOP_ECLASS} ]]; then
_DESKTOP_ECLASS=1
# @FUNCTION: make_desktop_entry
# @USAGE: <command> [name] [icon] [type] [fields]
# @DESCRIPTION:
# Make a .desktop file.
#
# @CODE
# binary: what command does the app run with?
# name: the name that will show up in the menu
# icon: the icon to use in the menu entry
# this can be relative (to /usr/share/pixmaps) or
# a full path to an icon
# type: what kind of application is this?
# for categories:
# https://specifications.freedesktop.org/menu-spec/latest/apa.html
# if unset, function tries to guess from package's category
# fields: extra fields to append to the desktop file; a printf string
# @CODE
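#
# Example (illustrative values):
# @CODE
# make_desktop_entry "foo --fullscreen" "Foo Viewer" foo "Graphics;Viewer"
# @CODE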
make_desktop_entry() {
[[ -z $1 ]] && die "make_desktop_entry: You must specify the executable"
local exec=${1}
local name=${2:-${PN}}
local icon=${3:-${PN}}
local type=${4}
local fields=${5}
if [[ -z ${type} ]] ; then
local catmaj=${CATEGORY%%-*}
local catmin=${CATEGORY##*-}
case ${catmaj} in
app)
case ${catmin} in
accessibility) type="Utility;Accessibility";;
admin) type=System;;
antivirus) type=System;;
arch) type="Utility;Archiving";;
backup) type="Utility;Archiving";;
cdr) type="AudioVideo;DiscBurning";;
dicts) type="Office;Dictionary";;
doc) type=Documentation;;
editors) type="Utility;TextEditor";;
emacs) type="Development;TextEditor";;
emulation) type="System;Emulator";;
laptop) type="Settings;HardwareSettings";;
office) type=Office;;
pda) type="Office;PDA";;
vim) type="Development;TextEditor";;
xemacs) type="Development;TextEditor";;
esac
;;
dev)
type="Development"
;;
games)
case ${catmin} in
action|fps) type=ActionGame;;
arcade) type=ArcadeGame;;
board) type=BoardGame;;
emulation) type=Emulator;;
kids) type=KidsGame;;
puzzle) type=LogicGame;;
roguelike) type=RolePlaying;;
rpg) type=RolePlaying;;
simulation) type=Simulation;;
sports) type=SportsGame;;
strategy) type=StrategyGame;;
esac
type="Game;${type}"
;;
gnome)
type="Gnome;GTK"
;;
kde)
type="KDE;Qt"
;;
mail)
type="Network;Email"
;;
media)
case ${catmin} in
gfx)
type=Graphics
;;
*)
case ${catmin} in
radio) type=Tuner;;
sound) type=Audio;;
tv) type=TV;;
video) type=Video;;
esac
type="AudioVideo;${type}"
;;
esac
;;
net)
case ${catmin} in
dialup) type=Dialup;;
ftp) type=FileTransfer;;
im) type=InstantMessaging;;
irc) type=IRCClient;;
mail) type=Email;;
news) type=News;;
nntp) type=News;;
p2p) type=FileTransfer;;
voip) type=Telephony;;
esac
type="Network;${type}"
;;
sci)
case ${catmin} in
astro*) type=Astronomy;;
bio*) type=Biology;;
calc*) type=Calculator;;
chem*) type=Chemistry;;
elec*) type=Electronics;;
geo*) type=Geology;;
math*) type=Math;;
physics) type=Physics;;
visual*) type=DataVisualization;;
esac
type="Education;Science;${type}"
;;
sys)
type="System"
;;
www)
case ${catmin} in
client) type=WebBrowser;;
esac
type="Network;${type}"
;;
*)
type=
;;
esac
fi
local slot=${SLOT%/*}
if [[ ${slot} == "0" ]] ; then
local desktop_name="${PN}"
else
local desktop_name="${PN}-${slot}"
fi
local desktop="${exec%%[[:space:]]*}"
desktop="${T}/${desktop##*/}-${desktop_name}.desktop"
# Don't append another ";" when a valid category value is provided.
type=${type%;}${type:+;}
if [[ -n ${icon} && ${icon} != /* ]] && [[ ${icon} == *.xpm || ${icon} == *.png || ${icon} == *.svg ]]; then
ewarn "As described in the Icon Theme Specification, icon file extensions are not"
ewarn "allowed in .desktop files if the value is not an absolute path."
icon=${icon%.*}
fi
cat <<-EOF > "${desktop}" || die
[Desktop Entry]
Name=${name}
Type=Application
Comment=${DESCRIPTION}
Exec=${exec}
TryExec=${exec%% *}
Icon=${icon}
Categories=${type}
EOF
if [[ ${fields:-=} != *=* ]] ; then
# 5th arg used to be value to Path=
ewarn "make_desktop_entry: update your 5th arg to read Path=${fields}"
fields="Path=${fields}"
fi
if [[ -n ${fields} ]]; then
printf '%b\n' "${fields}" >> "${desktop}" || die
fi
(
# wrap the env here so that the 'insinto' call
# doesn't corrupt the env of the caller
insopts -m 0644
insinto /usr/share/applications
doins "${desktop}"
) || die "installing desktop file failed"
}
# @FUNCTION: make_session_desktop
# @USAGE: <title> <command> [command args...]
# @DESCRIPTION:
# Make a GDM/KDM Session file. The title is the name shown for the session,
# while the command is the executable that is run to start the Window Manager.
#
# You can set the name of the file via the ${wm} variable.
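#
# Example (illustrative window manager):
# @CODE
# make_session_desktop "Foo WM" /usr/bin/foowm
# @CODE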
make_session_desktop() {
[[ -z $1 ]] && eerror "$0: You must specify the title" && return 1
[[ -z $2 ]] && eerror "$0: You must specify the command" && return 1
local title=$1
local command=$2
local desktop=${T}/${wm:-${PN}}.desktop
shift 2
cat <<-EOF > "${desktop}" || die
[Desktop Entry]
Name=${title}
Comment=This session logs you into ${title}
Exec=${command} $*
TryExec=${command}
Type=XSession
EOF
(
# wrap the env here so that the 'insinto' call
# doesn't corrupt the env of the caller
insopts -m 0644
insinto /usr/share/xsessions
doins "${desktop}"
)
}
# @FUNCTION: domenu
# @USAGE: <menus>
# @DESCRIPTION:
# Install the list of .desktop menu files into the appropriate directory
# (/usr/share/applications).
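#
# Example (illustrative path):
# @CODE
# domenu "${FILESDIR}"/${PN}.desktop
# @CODE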
domenu() {
(
# wrap the env here so that the 'insinto' call
# doesn't corrupt the env of the caller
local i ret=0
insopts -m 0644
insinto /usr/share/applications
for i in "$@" ; do
if [[ -d ${i} ]] ; then
doins "${i}"/*.desktop
((ret|=$?))
else
doins "${i}"
((ret|=$?))
fi
done
exit ${ret}
)
}
# @FUNCTION: newmenu
# @USAGE: <menu> <newname>
# @DESCRIPTION:
# Like all other new* functions, install the specified menu as newname.
newmenu() {
(
# wrap the env here so that the 'insinto' call
# doesn't corrupt the env of the caller
insopts -m 0644
insinto /usr/share/applications
newins "$@"
)
}
# @FUNCTION: _iconins
# @INTERNAL
# @DESCRIPTION:
# function for use in doicon and newicon
_iconins() {
(
# wrap the env here so that the 'insinto' call
# doesn't corrupt the env of the caller
insopts -m 0644
local funcname=$1; shift
local size dir
local context=apps
local theme=hicolor
while [[ $# -gt 0 ]] ; do
case $1 in
-s|--size)
if [[ ${2%%x*}x${2%%x*} == "$2" ]] ; then
size=${2%%x*}
else
size=${2}
fi
case ${size} in
16|22|24|32|36|48|64|72|96|128|192|256|512)
size=${size}x${size};;
scalable)
;;
*)
eerror "${size} is an unsupported icon size!"
exit 1;;
esac
shift 2;;
-t|--theme)
theme=${2}
shift 2;;
-c|--context)
context=${2}
shift 2;;
*)
if [[ -z ${size} ]] ; then
insinto /usr/share/pixmaps
else
insinto /usr/share/icons/${theme}/${size}/${context}
fi
if [[ ${funcname} == doicon ]] ; then
if [[ -f $1 ]] ; then
doins "${1}"
elif [[ -d $1 ]] ; then
shopt -s nullglob
doins "${1}"/*.{png,svg}
shopt -u nullglob
else
eerror "${1} is not a valid file/directory!"
exit 1
fi
else
break
fi
shift 1;;
esac
done
if [[ ${funcname} == newicon ]] ; then
newins "$@"
fi
) || die
}
# @FUNCTION: doicon
# @USAGE: [options] <icons>
# @DESCRIPTION:
# Install icon into the icon directory /usr/share/icons or into
# /usr/share/pixmaps if "--size" is not set.
# This is useful in conjunction with creating desktop/menu files.
#
# @CODE
# options:
# -s, --size
# !!! must specify to install into /usr/share/icons/... !!!
# size of the icon, like 48 or 48x48
# supported icon sizes are:
# 16 22 24 32 36 48 64 72 96 128 192 256 512 scalable
# -c, --context
# defaults to "apps"
# -t, --theme
# defaults to "hicolor"
#
# icons: list of icons
#
# example 1: doicon foobar.png fuqbar.svg suckbar.png
# results in: insinto /usr/share/pixmaps
# doins foobar.png fuqbar.svg suckbar.png
#
# example 2: doicon -s 48 foobar.png fuqbar.png blobbar.png
# results in: insinto /usr/share/icons/hicolor/48x48/apps
# doins foobar.png fuqbar.png blobbar.png
# @CODE
doicon() {
_iconins ${FUNCNAME} "$@"
}
# @FUNCTION: newicon
# @USAGE: [options] <icon> <newname>
# @DESCRIPTION:
# Like doicon, install the specified icon as newname.
#
# @CODE
# example 1: newicon foobar.png NEWNAME.png
# results in: insinto /usr/share/pixmaps
# newins foobar.png NEWNAME.png
#
# example 2: newicon -s 48 foobar.png NEWNAME.png
# results in: insinto /usr/share/icons/hicolor/48x48/apps
# newins foobar.png NEWNAME.png
# @CODE
newicon() {
_iconins ${FUNCNAME} "$@"
}
fi

161
eclass/dist-kernel-utils.eclass Normal file
View File

@ -0,0 +1,161 @@
# Copyright 2020-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: dist-kernel-utils.eclass
# @MAINTAINER:
# Distribution Kernel Project <dist-kernel@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 7
# @BLURB: Utility functions related to Distribution Kernels
# @DESCRIPTION:
# This eclass provides various utility functions related to Distribution
# Kernels.
if [[ ! ${_DIST_KERNEL_UTILS} ]]; then
case "${EAPI:-0}" in
0|1|2|3|4|5|6)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
# @FUNCTION: dist-kernel_build_initramfs
# @USAGE: <output> <version>
# @DESCRIPTION:
# Build an initramfs for the kernel. <output> specifies the absolute
# path where initramfs will be created, while <version> specifies
# the kernel version, used to find modules.
#
# Note: while this function uses dracut at the moment, other initramfs
# variants may be supported in the future.
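#
# Example (a hedged sketch; the output path and ${ver} are illustrative):
# @CODE
# dist-kernel_build_initramfs "${EROOT}/boot/initramfs-${ver}.img" "${ver}"
# @CODE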
dist-kernel_build_initramfs() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 2 ]] || die "${FUNCNAME}: invalid arguments"
local output=${1}
local version=${2}
local rel_image_path=$(dist-kernel_get_image_path)
local image=${output%/*}/${rel_image_path##*/}
local args=(
--force
# if uefi=yes is used, dracut needs to locate the kernel image
--kernel-image "${image}"
# positional arguments
"${output}" "${version}"
)
ebegin "Building initramfs via dracut"
dracut "${args[@]}"
eend ${?} || die -n "Building initramfs failed"
}
# @FUNCTION: dist-kernel_get_image_path
# @DESCRIPTION:
# Get relative kernel image path specific to the current ${ARCH}.
dist-kernel_get_image_path() {
case ${ARCH} in
amd64|x86)
echo arch/x86/boot/bzImage
;;
arm64)
echo arch/arm64/boot/Image.gz
;;
arm)
echo arch/arm/boot/zImage
;;
ppc64)
# ./ is required because of ${image_path%/*}
# substitutions in the code
echo ./vmlinux
;;
*)
die "${FUNCNAME}: unsupported ARCH=${ARCH}"
;;
esac
}
# @FUNCTION: dist-kernel_install_kernel
# @USAGE: <version> <image> <system.map>
# @DESCRIPTION:
# Install kernel using installkernel tool. <version> specifies
# the kernel version, <image> full path to the image, <system.map>
# full path to System.map.
dist-kernel_install_kernel() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 3 ]] || die "${FUNCNAME}: invalid arguments"
local version=${1}
local image=${2}
local map=${3}
# if dracut is used in uefi=yes mode, initrd will actually
# be a combined kernel+initramfs UEFI executable. we can easily
# recognize it by PE magic (vs cpio for a regular initramfs)
local initrd=${image%/*}/initrd
local magic
[[ -s ${initrd} ]] && read -n 2 magic < "${initrd}"
if [[ ${magic} == MZ ]]; then
einfo "Combined UEFI kernel+initramfs executable found"
# install the combined executable in place of kernel
image=${initrd}.uefi
mv "${initrd}" "${image}" || die
# put an empty file in place of initrd. installing a duplicate
# file would waste disk space, and removing it entirely provokes
# kernel-install to regenerate it via dracut.
> "${initrd}"
fi
ebegin "Installing the kernel via installkernel"
# note: .config is taken relatively to System.map;
# initrd relatively to bzImage
installkernel "${version}" "${image}" "${map}"
eend ${?} || die -n "Installing the kernel failed"
}
# @FUNCTION: dist-kernel_reinstall_initramfs
# @USAGE: <kv-dir> <kv-full>
# @DESCRIPTION:
# Rebuild and install initramfs for the specified dist-kernel.
# <kv-dir> is the kernel source directory (${KV_DIR} from linux-info),
# while <kv-full> is the full kernel version (${KV_FULL}).
# The function will determine whether <kv-dir> is actually
# a dist-kernel, and whether initramfs was used.
#
# This function is to be used in pkg_postinst() of ebuilds installing
# kernel modules that are included in the initramfs.
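#
# Example (a minimal sketch for a module package inheriting linux-info):
# @CODE
# pkg_postinst() {
# dist-kernel_reinstall_initramfs "${KV_DIR}" "${KV_FULL}"
# }
# @CODE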
dist-kernel_reinstall_initramfs() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 2 ]] || die "${FUNCNAME}: invalid arguments"
local kernel_dir=${1}
local ver=${2}
local image_path=${kernel_dir}/$(dist-kernel_get_image_path)
local initramfs_path=${image_path%/*}/initrd
if [[ ! -f ${image_path} ]]; then
eerror "Kernel install missing, image not found:"
eerror " ${image_path}"
eerror "Initramfs will not be updated. Please reinstall your kernel."
return
fi
if [[ ! -f ${initramfs_path} ]]; then
einfo "No initramfs found at ${initramfs_path}"
return
fi
dist-kernel_build_initramfs "${initramfs_path}" "${ver}"
dist-kernel_install_kernel "${ver}" "${image_path}" \
"${kernel_dir}/System.map"
}
_DIST_KERNEL_UTILS=1
fi

1222
eclass/distutils-r1.eclass Normal file

File diff suppressed because it is too large Load Diff

396
eclass/docs.eclass Normal file
View File

@ -0,0 +1,396 @@
# Copyright 1999-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: docs.eclass
# @MAINTAINER:
# Andrew Ammerlaan <andrewammerlaan@riseup.net>
# @AUTHOR:
# Author: Andrew Ammerlaan <andrewammerlaan@riseup.net>
# Based on the work of: Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 6 7
# @BLURB: A simple eclass to build documentation.
# @DESCRIPTION:
# A simple eclass providing basic functions and variables to build
# documentation.
#
# Please note that this eclass appends to RDEPEND and DEPEND
# unconditionally for you.
#
# This eclass also appends "doc" to IUSE, and sets HTML_DOCS
# to the location of the compiled documentation automatically,
# 'einstalldocs' will then automatically install the documentation
# to the correct directory.
#
# The aim of this eclass is to make it easy to add additional
# doc builders. To do this, add a <DOCS_BUILDER>_deps and
# <DOCS_BUILDER>_compile function for your doc builder.
# For python based doc builders you can use the
# python_append_deps function to append [${PYTHON_USEDEP}]
# automatically to additional dependencies.
#
# Example use doxygen:
# @CODE
# DOCS_BUILDER="doxygen"
# DOCS_DEPEND="media-gfx/imagemagick"
# DOCS_DIR="docs"
#
# inherit docs
#
# ...
#
# src_compile() {
# default
# docs_compile
# }
# @CODE
#
# Example use mkdocs with distutils-r1:
# @CODE
# DOCS_BUILDER="mkdocs"
# DOCS_DEPEND="dev-python/mkdocs-material"
# DOCS_DIR="doc"
#
# PYTHON_COMPAT=( python3_{7,8,9} )
#
# inherit distutils-r1 docs
#
# ...
# @CODE
case "${EAPI:-0}" in
0|1|2|3|4|5)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
6|7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
# @ECLASS-VARIABLE: DOCS_BUILDER
# @REQUIRED
# @PRE_INHERIT
# @DESCRIPTION:
# Sets the doc builder to use, currently supports
# sphinx, mkdocs and doxygen.
# PYTHON_COMPAT should be set for python based
# doc builders: sphinx and mkdocs
# @ECLASS-VARIABLE: DOCS_DIR
# @DESCRIPTION:
# Path containing the doc builder config file(s).
#
# For sphinx this is the location of "conf.py"
#
# For mkdocs this is the location of "mkdocs.yml"
# Note that mkdocs.yml often does not reside
# in the same directory as the actual doc files
#
# For doxygen the default name is Doxyfile, but
# package may use a non-standard name. If this
# is the case one should set DOCS_CONFIG_NAME to
# the correct name
#
# Defaults to ${S}
# @ECLASS-VARIABLE: DOCS_DEPEND
# @DEFAULT_UNSET
# @PRE_INHERIT
# @DESCRIPTION:
# Sets additional dependencies required to build the
# documentation.
# For sphinx and mkdocs these dependencies should
# be specified *without* [${PYTHON_USEDEP}], this
# is added by the eclass. E.g. to depend on mkdocs-material:
#
# @CODE
# DOCS_DEPEND="dev-python/mkdocs-material"
# @CODE
#
# This eclass appends to this variable, this makes it
# possible to call it later in your ebuild again if
# necessary.
# @ECLASS-VARIABLE: DOCS_AUTODOC
# @PRE_INHERIT
# @DESCRIPTION:
# Sets whether to use sphinx.ext.autodoc/mkautodoc
# Defaults to 1 (True) for sphinx, and 0 (False) for mkdocs.
# Not relevant for doxygen.
# @ECLASS-VARIABLE: DOCS_OUTDIR
# @DESCRIPTION:
# Sets the directory where the documentation should
# be built into. There is no real reason to change this.
# However, this variable is useful if the package should
# also install other HTML files.
#
# Example use:
# @CODE
# HTML_DOCS=( "${yourdocs}" "${DOCS_OUTDIR}/." )
# @CODE
#
# Defaults to ${S}/_build/html
# @ECLASS-VARIABLE: DOCS_CONFIG_NAME
# @DESCRIPTION:
# Name of the doc builder config file.
#
# Only relevant for doxygen, as it allows
# config files with non-standard names.
# Does not do anything for mkdocs or sphinx.
#
# Defaults to Doxyfile for doxygen
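#
# Example (the file name is illustrative):
# @CODE
# DOCS_CONFIG_NAME="Doxyfile.in"
# @CODE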
if [[ ! ${_DOCS} ]]; then
# For the python based DOCS_BUILDERS we need to inherit any python eclass
case ${DOCS_BUILDER} in
"sphinx"|"mkdocs")
# We need the python_gen_any_dep function
if [[ ! ${_PYTHON_R1} && ! ${_PYTHON_ANY_R1} && ! ${_PYTHON_SINGLE_R1} ]]; then
die "distutils-r1, python-r1, python-single-r1 or python-any-r1 needs to be inherited to use python based documentation builders"
fi
;;
"doxygen")
# do not need to inherit anything for doxygen
;;
"")
die "DOCS_BUILDER unset, should be set to use ${ECLASS}"
;;
*)
die "Unsupported DOCS_BUILDER=${DOCS_BUILDER} (unknown) for ${ECLASS}"
;;
esac
# @FUNCTION: python_append_deps
# @INTERNAL
# @DESCRIPTION:
# Appends [\${PYTHON_USEDEP}] to all dependencies
# for python based DOCS_BUILDERs such as mkdocs or
# sphinx.
python_append_deps() {
debug-print-function ${FUNCNAME}
local temp
local dep
for dep in ${DOCS_DEPEND[@]}; do
temp+=" ${dep}[\${PYTHON_USEDEP}]"
done
DOCS_DEPEND=${temp}
}
# @FUNCTION: sphinx_deps
# @INTERNAL
# @DESCRIPTION:
# Sets dependencies for sphinx
sphinx_deps() {
debug-print-function ${FUNCNAME}
: ${DOCS_AUTODOC:=1}
deps="dev-python/sphinx[\${PYTHON_USEDEP}]
${DOCS_DEPEND}"
if [[ ${DOCS_AUTODOC} == 0 ]]; then
if [[ -n "${DOCS_DEPEND}" ]]; then
die "${FUNCNAME}: do not set DOCS_AUTODOC to 0 if external plugins are used"
fi
elif [[ ${DOCS_AUTODOC} != 0 && ${DOCS_AUTODOC} != 1 ]]; then
die "${FUNCNAME}: DOCS_AUTODOC should be set to 0 or 1"
fi
if [[ ${_PYTHON_SINGLE_R1} ]]; then
DOCS_DEPEND="$(python_gen_cond_dep "${deps}")"
else
DOCS_DEPEND="$(python_gen_any_dep "${deps}")"
fi
}
# @FUNCTION: sphinx_compile
# @INTERNAL
# @DESCRIPTION:
# Calls sphinx to build docs.
#
# If you overwrite python_compile_all do not call
# this function, call docs_compile instead
sphinx_compile() {
debug-print-function ${FUNCNAME}
use doc || return
local confpy=${DOCS_DIR}/conf.py
[[ -f ${confpy} ]] ||
die "${FUNCNAME}: ${confpy} not found, DOCS_DIR=${DOCS_DIR} call wrong"
if [[ ${DOCS_AUTODOC} == 0 ]]; then
if grep -F -q 'sphinx.ext.autodoc' "${confpy}"; then
die "${FUNCNAME}: autodoc disabled but sphinx.ext.autodoc found in ${confpy}"
fi
elif [[ ${DOCS_AUTODOC} == 1 ]]; then
if ! grep -F -q 'sphinx.ext.autodoc' "${confpy}"; then
die "${FUNCNAME}: sphinx.ext.autodoc not found in ${confpy}, set DOCS_AUTODOC=0"
fi
fi
sed -i -e 's:^intersphinx_mapping:disabled_&:' \
"${DOCS_DIR}"/conf.py || die
# not all packages include the Makefile in pypi tarball
sphinx-build -b html -d "${DOCS_OUTDIR}"/_build/doctrees "${DOCS_DIR}" \
"${DOCS_OUTDIR}" || die "${FUNCNAME}: sphinx-build failed"
}
# @FUNCTION: mkdocs_deps
# @INTERNAL
# @DESCRIPTION:
# Sets dependencies for mkdocs
mkdocs_deps() {
debug-print-function ${FUNCNAME}
: ${DOCS_AUTODOC:=0}
deps="dev-python/mkdocs[\${PYTHON_USEDEP}]
${DOCS_DEPEND}"
if [[ ${DOCS_AUTODOC} == 1 ]]; then
deps="dev-python/mkautodoc[\${PYTHON_USEDEP}]
${deps}"
elif [[ ${DOCS_AUTODOC} != 0 && ${DOCS_AUTODOC} != 1 ]]; then
die "${FUNCNAME}: DOCS_AUTODOC should be set to 0 or 1"
fi
if [[ ${_PYTHON_SINGLE_R1} ]]; then
DOCS_DEPEND="$(python_gen_cond_dep "${deps}")"
else
DOCS_DEPEND="$(python_gen_any_dep "${deps}")"
fi
}
# @FUNCTION: mkdocs_compile
# @INTERNAL
# @DESCRIPTION:
# Calls mkdocs to build docs.
#
# If you overwrite python_compile_all do not call
# this function, call docs_compile instead
mkdocs_compile() {
debug-print-function ${FUNCNAME}
use doc || return
local mkdocsyml=${DOCS_DIR}/mkdocs.yml
[[ -f ${mkdocsyml} ]] ||
die "${FUNCNAME}: ${mkdocsyml} not found, DOCS_DIR=${DOCS_DIR} wrong"
pushd "${DOCS_DIR}" || die
mkdocs build -d "${DOCS_OUTDIR}" || die "${FUNCNAME}: mkdocs build failed"
popd || die
# remove generated .gz variants
# mkdocs currently has no option to disable this
# and portage complains: "Colliding files found by ecompress"
rm "${DOCS_OUTDIR}"/*.gz || die
}
# @FUNCTION: doxygen_deps
# @INTERNAL
# @DESCRIPTION:
# Sets dependencies for doxygen
doxygen_deps() {
debug-print-function ${FUNCNAME}
DOCS_DEPEND="app-doc/doxygen
${DOCS_DEPEND}"
}
# @FUNCTION: doxygen_compile
# @INTERNAL
# @DESCRIPTION:
# Calls doxygen to build docs.
doxygen_compile() {
debug-print-function ${FUNCNAME}
use doc || return
: ${DOCS_CONFIG_NAME:="Doxyfile"}
local doxyfile=${DOCS_DIR}/${DOCS_CONFIG_NAME}
[[ -f ${doxyfile} ]] ||
die "${FUNCNAME}: ${doxyfile} not found, DOCS_DIR=${DOCS_DIR} or DOCS_CONFIG_NAME=${DOCS_CONFIG_NAME} wrong"
# doxygen wants the HTML_OUTPUT dir to already exist
mkdir -p "${DOCS_OUTDIR}" || die
pushd "${DOCS_DIR}" || die
(cat "${DOCS_CONFIG_NAME}" ; echo "HTML_OUTPUT=${DOCS_OUTDIR}") | doxygen - || die "${FUNCNAME}: doxygen failed"
popd || die
}
# @FUNCTION: docs_compile
# @DESCRIPTION:
# Calls DOCS_BUILDER and sets HTML_DOCS
#
# This function must be called in src_compile. Take care not to
# overwrite the variables set by it. If distutils-r1 is inherited
# *before* this eclass, then docs_compile will be automatically
# added to python_compile_all() and there is no need to call
# it manually. Note that this function checks if USE="doc" is
# enabled, and if not automatically exits. Therefore, there is
# no need to wrap this function in an if statement.
#
# Example use:
# @CODE
# src_compile() {
# default
# docs_compile
# }
# @CODE
docs_compile() {
debug-print-function ${FUNCNAME}
use doc || return
# Set a sensible default as DOCS_DIR
: ${DOCS_DIR:="${S}"}
# Where to put the compiled files?
: ${DOCS_OUTDIR:="${S}/_build/html"}
${DOCS_BUILDER}_compile
HTML_DOCS+=( "${DOCS_OUTDIR}/." )
# we need to ensure successful return in case we're called last,
# otherwise Portage may wrongly assume sourcing failed
return 0
}
# This is where we setup the USE/(B)DEPEND variables
# and call the doc builder specific setup functions
IUSE+=" doc"
# Call the correct setup function
case ${DOCS_BUILDER} in
"sphinx")
python_append_deps
sphinx_deps
;;
"mkdocs")
python_append_deps
mkdocs_deps
;;
"doxygen")
doxygen_deps
;;
esac
if [[ ${EAPI} == [7] ]]; then
BDEPEND+=" doc? ( ${DOCS_DEPEND} )"
else
DEPEND+=" doc? ( ${DOCS_DEPEND} )"
fi
# If this is a python package using distutils-r1
# then put the compile function in the specific
# python function, else docs_compile should be manually
# added to src_compile
if [[ ${_DISTUTILS_R1} && ( ${DOCS_BUILDER} == "mkdocs" || ${DOCS_BUILDER} == "sphinx" ) ]]; then
python_compile_all() { docs_compile; }
fi
_DOCS=1
fi

145
eclass/dotnet.eclass Normal file
View File

@ -0,0 +1,145 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: dotnet.eclass
# @MAINTAINER: dotnet@gentoo.org
# @SUPPORTED_EAPIS: 1 2 3 4 5 6 7
# @BLURB: common settings and functions for mono and dotnet related packages
# @DESCRIPTION:
# The dotnet eclass contains common environment settings that are useful for
# dotnet packages. Currently, it provides no functions, just exports
# MONO_SHARED_DIR and sets LC_ALL in order to prevent errors during compilation
# of dotnet packages.
case ${EAPI:-0} in
0)
die "this eclass doesn't support EAPI 0" ;;
[1-6])
inherit eapi7-ver multilib
DEPEND="dev-lang/mono" ;;
*)
BDEPEND="dev-lang/mono" ;;
esac
inherit mono-env
# @ECLASS-VARIABLE: USE_DOTNET
# @DESCRIPTION:
# USE flags added to IUSE.
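#
# Example (a sketch; the target list is illustrative):
# @CODE
# USE_DOTNET="net40 net45"
# inherit dotnet
# @CODE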
# Set default USE flags according to DOTNET_TARGETS
for x in ${USE_DOTNET}; do
case ${x} in
net45) if [[ ${DOTNET_TARGETS} == *net45* ]]; then IUSE+=" +net45"; else IUSE+=" net45"; fi;;
net40) if [[ ${DOTNET_TARGETS} == *net40* ]]; then IUSE+=" +net40"; else IUSE+=" net40"; fi;;
net35) if [[ ${DOTNET_TARGETS} == *net35* ]]; then IUSE+=" +net35"; else IUSE+=" net35"; fi;;
net20) if [[ ${DOTNET_TARGETS} == *net20* ]]; then IUSE+=" +net20"; else IUSE+=" net20"; fi;;
esac
done
# @FUNCTION: dotnet_pkg_setup
# @DESCRIPTION:
# This function sets the FRAMEWORK variable according to the enabled USE_DOTNET flags.
dotnet_pkg_setup() {
for x in ${USE_DOTNET} ; do
case ${x} in
net45) if use net45; then F="4.5"; fi;;
net40) if use net40; then F="4.0"; fi;;
net35) if use net35; then F="3.5"; fi;;
net20) if use net20; then F="2.0"; fi;;
esac
if [[ -z ${FRAMEWORK} ]]; then
if [[ ${F} ]]; then
FRAMEWORK="${F}";
fi
else
ver_test "${F}" -le "${FRAMEWORK}" || FRAMEWORK="${F}"
fi
done
if [[ -z ${FRAMEWORK} ]]; then
FRAMEWORK="4.0"
fi
einfo " -- USING .NET ${FRAMEWORK} FRAMEWORK -- "
}
# >=mono-0.92 versions using mcs -pkg:foo-sharp require shared memory, so we set the
# shared dir to ${T} so that ${T}/.wapi can be used during the install process.
export MONO_SHARED_DIR="${T}"
# Building mono, nant and many other dotnet packages is known to fail if LC_ALL
# variable is not set to C. To prevent this all mono related packages will be
# build with LC_ALL=C (see bugs #146424, #149817)
export LC_ALL=C
# Monodevelop-using applications need this to be set or they will try to create config
# files in the user's ~ dir.
export XDG_CONFIG_HOME="${T}"
# Fix bug 83020:
# "Access Violations Arise When Emerging Mono-Related Packages with MONO_AOT_CACHE"
unset MONO_AOT_CACHE
# @FUNCTION: exbuild
# @DESCRIPTION:
# Run xbuild with the Release configuration and the configured FRAMEWORK.
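#
# Example (a sketch; the solution file name is illustrative):
# @CODE
# src_compile() {
#	exbuild ${PN}.sln
# }
# @CODE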
exbuild() {
elog "xbuild ""$@"" /p:Configuration=Release /tv:4.0 /p:TargetFrameworkVersion=v""${FRAMEWORK}"" || die"
xbuild "$@" /p:Configuration=Release /tv:4.0 /p:TargetFrameworkVersion=v"${FRAMEWORK}" || die
}
# @FUNCTION: egacinstall
# @DESCRIPTION:
# Install an assembly into the Global Assembly Cache (GAC).
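#
# Example (a sketch; the assembly path is illustrative):
# @CODE
# src_install() {
#	egacinstall bin/Release/${PN}.dll
# }
# @CODE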
egacinstall() {
use !prefix && has "${EAPI:-0}" 0 1 2 && ED="${D}"
gacutil -i "${1}" \
-root "${ED}"/usr/$(get_libdir) \
-gacdir /usr/$(get_libdir) \
-package ${2:-${GACPN:-${PN}}} \
|| die "installing ${1} into the Global Assembly Cache failed"
}
# @FUNCTION: dotnet_multilib_comply
# @DESCRIPTION:
# Make the installed files comply with multilib: move files from /usr/lib
# into the correct libdir and adjust pkg-config files and wrapper scripts
# accordingly.
dotnet_multilib_comply() {
use !prefix && has "${EAPI:-0}" 0 1 2 && ED="${D}"
local dir finddirs=() mv_command=${mv_command:-mv}
if [[ -d "${ED}/usr/lib" && "$(get_libdir)" != "lib" ]]
then
if ! [[ -d "${ED}"/usr/"$(get_libdir)" ]]
then
mkdir "${ED}"/usr/"$(get_libdir)" || die "Couldn't mkdir ${ED}/usr/$(get_libdir)"
fi
${mv_command} "${ED}"/usr/lib/* "${ED}"/usr/"$(get_libdir)"/ || die "Moving files into correct libdir failed"
rm -rf "${ED}"/usr/lib
for dir in "${ED}"/usr/"$(get_libdir)"/pkgconfig "${ED}"/usr/share/pkgconfig
do
if [[ -d "${dir}" && "$(find "${dir}" -name '*.pc')" != "" ]]
then
pushd "${dir}" &> /dev/null
sed -i -r -e 's:/(lib)([^a-zA-Z0-9]|$):/'"$(get_libdir)"'\2:g' \
*.pc \
|| die "Sedding some sense into pkgconfig files failed."
popd "${dir}" &> /dev/null
fi
done
if [[ -d "${ED}/usr/bin" ]]
then
for exe in "${ED}/usr/bin"/*
do
if [[ "$(file "${exe}")" == *"shell script text"* ]]
then
sed -r -i -e ":/lib(/|$): s:/lib(/|$):/$(get_libdir)\1:" \
"${exe}" || die "Sedding some sense into ${exe} failed"
fi
done
fi
fi
}
EXPORT_FUNCTIONS pkg_setup

72
eclass/dune.eclass Normal file
View File

@ -0,0 +1,72 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: dune.eclass
# @MAINTAINER:
# rkitover@gmail.com
# Mark Wright <gienah@gentoo.org>
# @AUTHOR:
# Rafael Kitover <rkitover@gmail.com>
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Provides functions for installing dune packages.
# @DESCRIPTION:
# Provides dependencies on dune and ocaml and default src_compile, src_test and
# src_install for dune-based packages.
# @ECLASS-VARIABLE: DUNE_PKG_NAME
# @DESCRIPTION:
# Sets the actual dune package name, if different from gentoo package name.
# Set before inheriting the eclass.
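#
# Example (the package name is illustrative):
# @CODE
# DUNE_PKG_NAME=foo-core
# inherit dune
# @CODE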
case ${EAPI:-0} in
5|6|7) ;;
*) die "${ECLASS}: EAPI ${EAPI} not supported" ;;
esac
# Do not complain about CFLAGS etc since ml projects do not use them.
QA_FLAGS_IGNORED='.*'
EXPORT_FUNCTIONS src_compile src_test src_install
RDEPEND=">=dev-lang/ocaml-4:=[ocamlopt?]"
case ${EAPI:-0} in
0|1|2|3|4|5|6) DEPEND="${RDEPEND} dev-ml/dune";;
*) BDEPEND="dev-ml/dune dev-lang/ocaml"; DEPEND="${RDEPEND}" ;;
esac
dune_src_compile() {
dune build @install || die
}
dune_src_test() {
dune runtest || die
}
# @FUNCTION: dune-install
# @USAGE: <list of packages>
# @DESCRIPTION:
# Installs the dune packages given as arguments. For each "${pkg}" element in
# that list, "${pkg}.install" must be readable from "${PWD}/_build/default"
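#
# Example (a sketch; the package names are illustrative):
# @CODE
# src_install() {
#	dune-install foo foo-lwt
# }
# @CODE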
dune-install() {
local pkg
for pkg ; do
dune install \
--prefix="${ED%/}/usr" \
--libdir="${D%/}$(ocamlc -where)" \
--mandir="${ED%/}/usr/share/man" \
"${pkg}" || die
done
}
dune_src_install() {
local pkg="${1:-${DUNE_PKG_NAME:-${PN}}}"
dune-install "${pkg}"
# Move docs to the appropriate place.
if [ -d "${ED%/}/usr/doc/${pkg}" ] ; then
mkdir -p "${ED%/}/usr/share/doc/${PF}/" || die
mv "${ED%/}/usr/doc/${pkg}/"* "${ED%/}/usr/share/doc/${PF}/" || die
rm -rf "${ED%/}/usr/doc" || die
fi
}

308
eclass/eapi7-ver.eclass Normal file
View File

@ -0,0 +1,308 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: eapi7-ver.eclass
# @MAINTAINER:
# PMS team <pms@gentoo.org>
# @AUTHOR:
# Ulrich Müller <ulm@gentoo.org>
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6
# @BLURB: Testing implementation of EAPI 7 version manipulators
# @DESCRIPTION:
# A stand-alone implementation of the version manipulation functions
# aimed for EAPI 7. Intended to be used for wider testing of
# the proposed functions and to allow ebuilds to switch to the new
# model early, with minimal change needed for actual EAPI 7.
#
# https://bugs.gentoo.org/482170
#
# @SUBSECTION Version strings
#
# The functions support arbitrary version strings consisting of version
# components interspersed with (possibly empty) version separators.
#
# A version component can either consist purely of digits ([0-9]+)
# or purely of uppercase and lowercase letters ([A-Za-z]+). A version
# separator is either a string of any other characters ([^A-Za-z0-9]+),
# or it occurs at the transition between a sequence of letters
# and a sequence of digits, or vice versa. In the latter case,
# the version separator is an empty string.
#
# The version is processed left-to-right, and each successive component
# is assigned numbers starting with 1. The components are either split
# on version separators or on boundaries between digits and letters
# (in which case the separator between the components is empty).
# Version separators are assigned numbers starting with 1 for
# the separator between 1st and 2nd components. As a special case,
# if the version string starts with a separator, it is assigned index 0.
#
# Examples:
#
# @CODE
# 1.2b-alpha4 -> 1 . 2 '' b - alpha '' 4
# c s c s c s c s c
# 1 1 2 2 3 3 4 4 5
#
# .11. -> . 11 .
# s c s
# 0 1 1
# @CODE
#
# @SUBSECTION Ranges
#
# A range can be specified as 'm' for m-th version component, 'm-'
# for all components starting with m-th or 'm-n' for components starting
# at m-th and ending at n-th (inclusive). If the range spans outside
# the version string, it is truncated silently.
case ${EAPI:-0} in
0|1|2|3|4|5|6) ;;
7) die "${ECLASS}: EAPI=${EAPI} includes all functions from this eclass" ;;
*) die "${ECLASS}: EAPI=${EAPI} unknown" ;;
esac
# @FUNCTION: _ver_parse_range
# @USAGE: <range> <max>
# @INTERNAL
# @DESCRIPTION:
# Parse the range string <range>, setting 'start' and 'end' variables
# to the appropriate bounds. <max> specifies the appropriate upper
# bound for the range; the user-specified value is truncated to this.
_ver_parse_range() {
local range=${1}
local max=${2}
[[ ${range} == [0-9]* ]] \
|| die "${FUNCNAME}: range must start with a number"
start=${range%-*}
[[ ${range} == *-* ]] && end=${range#*-} || end=${start}
if [[ ${end} ]]; then
[[ ${start} -le ${end} ]] \
|| die "${FUNCNAME}: end of range must be >= start"
[[ ${end} -le ${max} ]] || end=${max}
else
end=${max}
fi
}
# @FUNCTION: _ver_split
# @USAGE: <version>
# @INTERNAL
# @DESCRIPTION:
# Split the version string <version> into separator-component array.
# Sets 'comp' to an array of the form: ( s_0 c_1 s_1 c_2 s_2 c_3... )
# where s_i are separators and c_i are components.
_ver_split() {
local v=${1} LC_ALL=C
comp=()
# get separators and components
local s c
while [[ ${v} ]]; do
# cut the separator
s=${v%%[a-zA-Z0-9]*}
v=${v:${#s}}
# cut the next component; it can be either digits or letters
[[ ${v} == [0-9]* ]] && c=${v%%[^0-9]*} || c=${v%%[^a-zA-Z]*}
v=${v:${#c}}
comp+=( "${s}" "${c}" )
done
}
# @FUNCTION: ver_cut
# @USAGE: <range> [<version>]
# @DESCRIPTION:
# Print the substring of the version string containing components
# defined by the <range> and the version separators between them.
# Processes <version> if specified, ${PV} otherwise.
#
# For the syntax of versions and ranges, please see the eclass
# description.
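#
# Examples:
# @CODE
# $(ver_cut 2 1.2.3)      ->  2
# $(ver_cut 1-2 1.2.3)    ->  1.2
# $(ver_cut 2- 1.2b_p3)   ->  2b_p3
# @CODE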
ver_cut() {
local range=${1}
local v=${2:-${PV}}
local start end
local -a comp
_ver_split "${v}"
local max=$((${#comp[@]}/2))
_ver_parse_range "${range}" "${max}"
if [[ ${start} -gt 0 ]]; then
start=$(( start*2 - 1 ))
fi
# Work around a bug in bash-3.2, where "${comp[*]:start:end*2-start}"
# inserts stray 0x7f characters for empty array elements
printf "%s" "${comp[@]:start:end*2-start}" $'\n'
}
# @FUNCTION: ver_rs
# @USAGE: <range> <repl> [<range> <repl>...] [<version>]
# @DESCRIPTION:
# Print the version string after substituting the specified version
# separators at <range> with <repl> (string). Multiple '<range> <repl>'
# pairs can be specified. Processes <version> if specified,
# ${PV} otherwise.
#
# For the syntax of versions and ranges, please see the eclass
# description.
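#
# Examples:
# @CODE
# $(ver_rs 1 - 1.2.3)        ->  1-2.3
# $(ver_rs 1-2 - 1.2.3.4)    ->  1-2-3.4
# @CODE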
ver_rs() {
local v
(( ${#} & 1 )) && v=${@: -1} || v=${PV}
local start end i
local -a comp
_ver_split "${v}"
local max=$((${#comp[@]}/2 - 1))
while [[ ${#} -ge 2 ]]; do
_ver_parse_range "${1}" "${max}"
for (( i = start*2; i <= end*2; i+=2 )); do
[[ ${i} -eq 0 && -z ${comp[i]} ]] && continue
comp[i]=${2}
done
shift 2
done
local IFS=
echo "${comp[*]}"
}
# @FUNCTION: _ver_compare_int
# @USAGE: <a> <b>
# @RETURN: 0 if <a> -eq <b>, 1 if <a> -lt <b>, 3 if <a> -gt <b>
# @INTERNAL
# @DESCRIPTION:
# Compare two non-negative integers <a> and <b>, of arbitrary length.
# If <a> is equal to, less than, or greater than <b>, return 0, 1, or 3
# as exit status, respectively.
_ver_compare_int() {
local a=$1 b=$2 d=$(( ${#1}-${#2} ))
# Zero-pad to equal length if necessary.
if [[ ${d} -gt 0 ]]; then
printf -v b "%0${d}d%s" 0 "${b}"
elif [[ ${d} -lt 0 ]]; then
printf -v a "%0$(( -d ))d%s" 0 "${a}"
fi
[[ ${a} > ${b} ]] && return 3
[[ ${a} == "${b}" ]]
}
# @FUNCTION: _ver_compare
# @USAGE: <va> <vb>
# @RETURN: 1 if <va> < <vb>, 2 if <va> = <vb>, 3 if <va> > <vb>
# @INTERNAL
# @DESCRIPTION:
# Compare two versions <va> and <vb>. If <va> is less than, equal to,
# or greater than <vb>, return 1, 2, or 3 as exit status, respectively.
_ver_compare() {
local va=${1} vb=${2} a an al as ar b bn bl bs br re LC_ALL=C
re="^([0-9]+(\.[0-9]+)*)([a-z]?)((_(alpha|beta|pre|rc|p)[0-9]*)*)(-r[0-9]+)?$"
[[ ${va} =~ ${re} ]] || die "${FUNCNAME}: invalid version: ${va}"
an=${BASH_REMATCH[1]}
al=${BASH_REMATCH[3]}
as=${BASH_REMATCH[4]}
ar=${BASH_REMATCH[7]}
[[ ${vb} =~ ${re} ]] || die "${FUNCNAME}: invalid version: ${vb}"
bn=${BASH_REMATCH[1]}
bl=${BASH_REMATCH[3]}
bs=${BASH_REMATCH[4]}
br=${BASH_REMATCH[7]}
# Compare numeric components (PMS algorithm 3.2)
# First component
_ver_compare_int "${an%%.*}" "${bn%%.*}" || return
while [[ ${an} == *.* && ${bn} == *.* ]]; do
# Other components (PMS algorithm 3.3)
an=${an#*.}
bn=${bn#*.}
a=${an%%.*}
b=${bn%%.*}
if [[ ${a} == 0* || ${b} == 0* ]]; then
# Remove any trailing zeros
[[ ${a} =~ 0+$ ]] && a=${a%"${BASH_REMATCH[0]}"}
[[ ${b} =~ 0+$ ]] && b=${b%"${BASH_REMATCH[0]}"}
[[ ${a} > ${b} ]] && return 3
[[ ${a} < ${b} ]] && return 1
else
_ver_compare_int "${a}" "${b}" || return
fi
done
[[ ${an} == *.* ]] && return 3
[[ ${bn} == *.* ]] && return 1
# Compare letter components (PMS algorithm 3.4)
[[ ${al} > ${bl} ]] && return 3
[[ ${al} < ${bl} ]] && return 1
# Compare suffixes (PMS algorithm 3.5)
as=${as#_}${as:+_}
bs=${bs#_}${bs:+_}
while [[ -n ${as} && -n ${bs} ]]; do
# Compare each suffix (PMS algorithm 3.6)
a=${as%%_*}
b=${bs%%_*}
if [[ ${a%%[0-9]*} == "${b%%[0-9]*}" ]]; then
_ver_compare_int "${a##*[a-z]}" "${b##*[a-z]}" || return
else
# Check for p first
[[ ${a%%[0-9]*} == p ]] && return 3
[[ ${b%%[0-9]*} == p ]] && return 1
# Hack: Use that alpha < beta < pre < rc alphabetically
[[ ${a} > ${b} ]] && return 3 || return 1
fi
as=${as#*_}
bs=${bs#*_}
done
if [[ -n ${as} ]]; then
[[ ${as} == p[_0-9]* ]] && return 3 || return 1
elif [[ -n ${bs} ]]; then
[[ ${bs} == p[_0-9]* ]] && return 1 || return 3
fi
# Compare revision components (PMS algorithm 3.7)
_ver_compare_int "${ar#-r}" "${br#-r}" || return
return 2
}
# @FUNCTION: ver_test
# @USAGE: [<v1>] <op> <v2>
# @DESCRIPTION:
# Check if the relation <v1> <op> <v2> is true. If <v1> is not specified,
# default to ${PVR}. <op> can be -gt, -ge, -eq, -ne, -le, -lt.
# Both versions must conform to the PMS version syntax (with optional
# revision parts), and the comparison is performed according to
# the algorithm specified in the PMS.
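#
# Example:
# @CODE
# if ver_test 1.2.3b -lt 1.2.4; then
#	einfo "older than 1.2.4"
# fi
# @CODE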
ver_test() {
local va op vb
if [[ $# -eq 3 ]]; then
va=${1}
shift
else
va=${PVR}
fi
[[ $# -eq 2 ]] || die "${FUNCNAME}: bad number of arguments"
op=${1}
vb=${2}
case ${op} in
-eq|-ne|-lt|-le|-gt|-ge) ;;
*) die "${FUNCNAME}: invalid operator: ${op}" ;;
esac
_ver_compare "${va}" "${vb}"
test $? "${op}" 2
}

108
eclass/eapi8-dosym.eclass Normal file
View File

@ -0,0 +1,108 @@
# Copyright 2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: eapi8-dosym.eclass
# @MAINTAINER:
# PMS team <pms@gentoo.org>
# @AUTHOR:
# Ulrich Müller <ulm@gentoo.org>
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Testing implementation of EAPI 8 dosym -r option
# @DESCRIPTION:
# A stand-alone implementation of the dosym command aimed for EAPI 8.
# Intended to be used for wider testing of the proposed option and to
# allow ebuilds to switch to the new model early, with minimal change
# needed for actual EAPI 8.
#
# https://bugs.gentoo.org/708360
case ${EAPI} in
5|6|7) ;;
*) die "${ECLASS}: EAPI=${EAPI:-0} not supported" ;;
esac
# @FUNCTION: _dosym8_canonicalize
# @USAGE: <path>
# @INTERNAL
# @DESCRIPTION:
# Transparent bash-only replacement for GNU "realpath -m -s".
# Resolve references to "/./", "/../" and remove extra "/" characters
# from <path>, without touching any actual file.
_dosym8_canonicalize() {
local path slash i prev out IFS=/
path=( $1 )
[[ $1 == /* ]] && slash=/
while true; do
# Find first instance of non-".." path component followed by "..",
# or as a special case, "/.." at the beginning of the path.
# Also drop empty and "." path components as we go along.
prev=
for i in ${!path[@]}; do
if [[ -z ${path[i]} || ${path[i]} == . ]]; then
unset "path[i]"
elif [[ ${path[i]} != .. ]]; then
prev=${i}
elif [[ ${prev} || ${slash} ]]; then
# Found, remove path components and reiterate
[[ ${prev} ]] && unset "path[prev]"
unset "path[i]"
continue 2
fi
done
# No (further) instance found, so we're done
break
done
out="${slash}${path[*]}"
echo "${out:-.}"
}
# @FUNCTION: dosym8
# @USAGE: [-r] <target> <link>
# @DESCRIPTION:
# Create a symbolic link <link>, pointing to <target>. If the
# directory containing the new link does not exist, create it.
#
# If called with option -r, expand <target> relative to the apparent
# path of the directory containing <link>. For example, "dosym8 -r
# /bin/foo /usr/bin/foo" will create a link named "../../bin/foo".
dosym8() {
local option_r
case $1 in
-r) option_r=t; shift ;;
esac
[[ $# -eq 2 ]] || die "${FUNCNAME}: bad number of arguments"
local target=$1 link=$2
if [[ ${option_r} ]]; then
local linkdir comp
# Expansion makes sense only for an absolute target path
[[ ${target} == /* ]] \
|| die "${FUNCNAME}: -r specified but no absolute target path"
target=$(_dosym8_canonicalize "${target}")
linkdir=$(_dosym8_canonicalize "/${link#/}")
linkdir=${linkdir%/*} # poor man's dirname(1)
linkdir=${linkdir:-/} # always keep the initial "/"
local ifs_save=${IFS-$' \t\n'} IFS=/
for comp in ${linkdir}; do
if [[ ${target%%/*} == "${comp}" ]]; then
target=${target#"${comp}"}
target=${target#/}
else
target=..${target:+/}${target}
fi
done
IFS=${ifs_save}
target=${target:-.}
fi
dosym "${target}" "${link}"
}

588
eclass/ecm.eclass Normal file
View File

@ -0,0 +1,588 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ecm.eclass
# @MAINTAINER:
# kde@gentoo.org
# @SUPPORTED_EAPIS: 7
# @BLURB: Support eclass for packages that use KDE Frameworks with ECM.
# @DESCRIPTION:
# This eclass is intended to streamline the creation of ebuilds for packages
# that use cmake and KDE Frameworks' extra-cmake-modules, thereby following
# some of their packaging conventions. It is primarily intended for the three
# upstream release groups (Frameworks, Plasma, Applications) but also for any
# other package that follows similar conventions.
#
# This eclass unconditionally inherits cmake.eclass and all its public
# variables and helper functions (not phase functions) may be considered as part
# of this eclass's API.
#
# This eclass's phase functions are not intended to be mixed and matched, so if
# any phase functions are overridden the version here should also be called.
if [[ -z ${_ECM_ECLASS} ]]; then
_ECM_ECLASS=1
# @ECLASS-VARIABLE: VIRTUALX_REQUIRED
# @DESCRIPTION:
# For proper description see virtualx.eclass manpage.
# Here we redefine the default value to be manual; if your package needs virtualx
# for tests, set VIRTUALX_REQUIRED=test.
: ${VIRTUALX_REQUIRED:=manual}
# @ECLASS-VARIABLE: ECM_NONGUI
# @DEFAULT_UNSET
# @DESCRIPTION:
# By default, for all CATEGORIES except kde-frameworks, assume we are building
# a GUI application. Add dependency on kde-frameworks/breeze-icons or
# kde-frameworks/oxygen-icons and run the xdg.eclass routines for pkg_preinst,
# pkg_postinst and pkg_postrm. If set to "true", do nothing.
if [[ ${CATEGORY} = kde-frameworks ]] ; then
: ${ECM_NONGUI:=true}
fi
: ${ECM_NONGUI:=false}
inherit cmake flag-o-matic toolchain-funcs virtualx
if [[ ${ECM_NONGUI} = false ]] ; then
inherit xdg
fi
case ${EAPI} in
7) ;;
*) die "EAPI=${EAPI:-0} is not supported" ;;
esac
if [[ -v KDE_GCC_MINIMAL ]]; then
EXPORT_FUNCTIONS pkg_pretend
fi
EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_test pkg_preinst pkg_postinst pkg_postrm
# @ECLASS-VARIABLE: ECM_KDEINSTALLDIRS
# @DESCRIPTION:
# Assume the package is using KDEInstallDirs macro and switch
# KDE_INSTALL_USE_QT_SYS_PATHS to ON. If set to "false", do nothing.
: ${ECM_KDEINSTALLDIRS:=true}
# @ECLASS-VARIABLE: ECM_DEBUG
# @DESCRIPTION:
# Add "debug" to IUSE. If !debug, add -DQT_NO_DEBUG to CPPFLAGS. If set to
# "false", do nothing.
: ${ECM_DEBUG:=true}
# @ECLASS-VARIABLE: ECM_DESIGNERPLUGIN
# @DESCRIPTION:
# If set to "true", add "designer" to IUSE to toggle build of designer plugins
# and add the necessary BDEPEND. If set to "false", do nothing.
: ${ECM_DESIGNERPLUGIN:=false}
# @ECLASS-VARIABLE: ECM_EXAMPLES
# @DESCRIPTION:
# By default unconditionally ignore a top-level examples subdirectory.
# If set to "true", add "examples" to IUSE to toggle adding that subdirectory.
: ${ECM_EXAMPLES:=false}
# @ECLASS-VARIABLE: ECM_HANDBOOK
# @DESCRIPTION:
# Will accept "true", "false", "optional", "forceoptional". If set to "false",
# do nothing.
# Otherwise, add "+handbook" to IUSE, add the appropriate dependency, and let
# KF5DocTools generate and install the handbook from docbook file(s) found in
# ECM_HANDBOOK_DIR. However if !handbook, disable build of ECM_HANDBOOK_DIR
# in CMakeLists.txt.
# If set to "optional", build with -DCMAKE_DISABLE_FIND_PACKAGE_KF5DocTools=ON
# when !handbook. If the package requires KF5KDELibs4Support, see next:
# If set to "forceoptional", remove a KF5DocTools dependency from the root
# CMakeLists.txt in addition to the above.
: ${ECM_HANDBOOK:=false}
# @ECLASS-VARIABLE: ECM_HANDBOOK_DIR
# @DESCRIPTION:
# Specifies the directory containing the docbook file(s) relative to ${S} to
# be processed by KF5DocTools (kdoctools_install).
: ${ECM_HANDBOOK_DIR:=doc}
# @ECLASS-VARIABLE: ECM_PO_DIRS
# @DESCRIPTION:
# Specifies directories of l10n files relative to ${S} to be processed by
# KF5I18n (ki18n_install). If IUSE nls exists and is disabled then disable
# build of these directories in CMakeLists.txt.
: ${ECM_PO_DIRS:="po poqm"}
# @ECLASS-VARIABLE: ECM_QTHELP
# @DEFAULT_UNSET
# @DESCRIPTION:
# Default value for all CATEGORIES except kde-frameworks is "false".
# If set to "true", add "doc" to IUSE, add the appropriate dependency, let
# -DBUILD_QCH=ON generate and install Qt compressed help files when USE=doc.
# If set to "false", do nothing.
if [[ ${CATEGORY} = kde-frameworks ]]; then
: ${ECM_QTHELP:=true}
fi
: ${ECM_QTHELP:=false}
# @ECLASS-VARIABLE: ECM_TEST
# @DEFAULT_UNSET
# @DESCRIPTION:
# Will accept "true", "false", "optional", "forceoptional",
# "forceoptional-recursive".
# Default value is "false", except for CATEGORY=kde-frameworks where it is
# set to "true". If set to "false", do nothing.
# For any other value, add "test" to IUSE and DEPEND on dev-qt/qttest:5.
# If set to "optional", build with -DCMAKE_DISABLE_FIND_PACKAGE_Qt5Test=ON
# when USE=!test.
# If set to "forceoptional", punt Qt5Test dependency and ignore "autotests",
# "test", "tests" subdirs from top-level CMakeLists.txt when USE=!test.
# If set to "forceoptional-recursive", punt Qt5Test dependencies and make
# autotest(s), unittest(s) and test(s) subdirs from *any* CMakeLists.txt in
# ${S} and below conditional on BUILD_TESTING when USE=!test. This is always
# meant as a short-term fix and creates ${T}/${P}-tests-optional.patch to
# refine and submit upstream.
if [[ ${CATEGORY} = kde-frameworks ]]; then
: ${ECM_TEST:=true}
fi
: ${ECM_TEST:=false}
# @ECLASS-VARIABLE: KFMIN
# @DEFAULT_UNSET
# @DESCRIPTION:
# Minimum version of Frameworks to require. Default value for kde-frameworks
# is ${PV} and 5.64.0 baseline for everything else. This is not going to be
# changed unless we also bump EAPI, which usually implies (rev-)bumping.
# Version will later be used to differentiate between KF5/Qt5 and KF6/Qt6.
if [[ ${CATEGORY} = kde-frameworks ]]; then
: ${KFMIN:=$(ver_cut 1-2)}
fi
: ${KFMIN:=5.64.0}
# @ECLASS-VARIABLE: KFSLOT
# @INTERNAL
# @DESCRIPTION:
# KDE Frameworks and Qt slot dependency, implied by KFMIN version.
: ${KFSLOT:=5}
case ${ECM_NONGUI} in
true) ;;
false)
# gui applications need breeze or oxygen for basic iconset, bug #564838
RDEPEND+=" || (
kde-frameworks/breeze-icons:*
kde-frameworks/oxygen-icons:*
)"
;;
*)
eerror "Unknown value for \${ECM_NONGUI}"
die "Value ${ECM_NONGUI} is not supported"
;;
esac
case ${ECM_DEBUG} in
true)
IUSE+=" debug"
;;
false) ;;
*)
eerror "Unknown value for \${ECM_DEBUG}"
die "Value ${ECM_DEBUG} is not supported"
;;
esac
case ${ECM_DESIGNERPLUGIN} in
true)
IUSE+=" designer"
BDEPEND+=" designer? ( dev-qt/designer:${KFSLOT} )"
;;
false) ;;
*)
eerror "Unknown value for \${ECM_DESIGNERPLUGIN}"
die "Value ${ECM_DESIGNERPLUGIN} is not supported"
;;
esac
case ${ECM_EXAMPLES} in
true)
IUSE+=" examples"
;;
false) ;;
*)
eerror "Unknown value for \${ECM_EXAMPLES}"
die "Value ${ECM_EXAMPLES} is not supported"
;;
esac
case ${ECM_HANDBOOK} in
true|optional|forceoptional)
IUSE+=" +handbook"
BDEPEND+=" handbook? ( >=kde-frameworks/kdoctools-${KFMIN}:${KFSLOT} )"
;;
false) ;;
*)
eerror "Unknown value for \${ECM_HANDBOOK}"
die "Value ${ECM_HANDBOOK} is not supported"
;;
esac
case ${ECM_QTHELP} in
true)
IUSE+=" doc"
COMMONDEPEND+=" doc? ( dev-qt/qt-docs:${KFSLOT} )"
BDEPEND+=" doc? (
>=app-doc/doxygen-1.8.13-r1
dev-qt/qthelp:${KFSLOT}
)"
;;
false) ;;
*)
eerror "Unknown value for \${ECM_QTHELP}"
die "Value ${ECM_QTHELP} is not supported"
;;
esac
case ${ECM_TEST} in
true|optional|forceoptional|forceoptional-recursive)
IUSE+=" test"
DEPEND+=" test? ( dev-qt/qttest:${KFSLOT} )"
RESTRICT+=" !test? ( test )"
;;
false) ;;
*)
eerror "Unknown value for \${ECM_TEST}"
die "Value ${ECM_TEST} is not supported"
;;
esac
BDEPEND+=" >=kde-frameworks/extra-cmake-modules-${KFMIN}:${KFSLOT}"
RDEPEND+=" >=kde-frameworks/kf-env-4"
COMMONDEPEND+=" dev-qt/qtcore:${KFSLOT}"
DEPEND+=" ${COMMONDEPEND}"
RDEPEND+=" ${COMMONDEPEND}"
unset COMMONDEPEND
# @ECLASS-VARIABLE: KDE_GCC_MINIMAL
# @DEFAULT_UNSET
# @DESCRIPTION:
# Minimum version of active GCC to require. This is checked in
# ecm_pkg_pretend and ecm_pkg_setup.
# @FUNCTION: _ecm_check_gcc_version
# @INTERNAL
# @DESCRIPTION:
# Determine if the current GCC version is acceptable, otherwise die.
_ecm_check_gcc_version() {
if [[ ${MERGE_TYPE} != binary && -v KDE_GCC_MINIMAL ]] && tc-is-gcc; then
local version=$(gcc-version)
debug-print "GCC version check activated"
debug-print "Version detected: ${version}"
debug-print "Version required: ${KDE_GCC_MINIMAL}"
ver_test ${version} -lt ${KDE_GCC_MINIMAL} &&
die "Sorry, but gcc-${KDE_GCC_MINIMAL} or later is required for this package (found ${version})."
fi
}
# @FUNCTION: _ecm_strip_handbook_translations
# @INTERNAL
# @DESCRIPTION:
# If LINGUAS is defined, enable only the requested translations when required.
_ecm_strip_handbook_translations() {
if ! [[ -v LINGUAS ]]; then
return
fi
local lang po
for po in ${ECM_PO_DIRS}; do
if [[ -d ${po} ]] ; then
pushd ${po} > /dev/null || die
for lang in *; do
if [[ -e ${lang} ]] && ! has ${lang/.po/} ${LINGUAS} ; then
case ${lang} in
cmake_modules | \
CMakeLists.txt | \
${PN}.pot) ;;
*) rm -r ${lang} || die ;;
esac
if [[ -e CMakeLists.txt ]] ; then
cmake_comment_add_subdirectory ${lang}
sed -e "/add_subdirectory([[:space:]]*${lang}\/.*[[:space:]]*)/d" \
-i CMakeLists.txt || die
fi
fi
done
popd > /dev/null || die
fi
done
}
# @FUNCTION: ecm_punt_bogus_dep
# @USAGE: <prefix> <dependency>
# @DESCRIPTION:
# Removes a specified dependency from a find_package call with multiple
# components.
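#
# Example (as used elsewhere in this eclass):
# @CODE
# ecm_punt_bogus_dep Qt5 Test
# @CODE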
ecm_punt_bogus_dep() {
local prefix=${1}
local dep=${2}
if [[ ! -e "CMakeLists.txt" ]]; then
return
fi
pcregrep -Mni "(?s)find_package\s*\(\s*${prefix}[^)]*?${dep}.*?\)" CMakeLists.txt > "${T}/bogus${dep}"
# pcregrep returns non-zero on no matches/error
if [[ $? -ne 0 ]] ; then
return
fi
local length=$(wc -l "${T}/bogus${dep}" | cut -d " " -f 1)
local first=$(head -n 1 "${T}/bogus${dep}" | cut -d ":" -f 1)
local last=$(( length + first - 1))
sed -e "${first},${last}s/${dep}//" -i CMakeLists.txt || die
if [[ ${length} -eq 1 ]] ; then
sed -e "/find_package\s*(\s*${prefix}\(\s\+\(REQUIRED\|CONFIG\|COMPONENTS\|\${[A-Z0-9_]*}\)\)\+\s*)/Is/^/# removed by ecm.eclass - /" -i CMakeLists.txt || die
fi
}
# @FUNCTION: ecm_pkg_pretend
# @DESCRIPTION:
# Checks if the active compiler meets the minimum version requirements.
# phase function is only exported if KDE_GCC_MINIMAL is defined.
ecm_pkg_pretend() {
debug-print-function ${FUNCNAME} "$@"
_ecm_check_gcc_version
}
# @FUNCTION: ecm_pkg_setup
# @DESCRIPTION:
# Checks if the active compiler meets the minimum version requirements.
ecm_pkg_setup() {
debug-print-function ${FUNCNAME} "$@"
_ecm_check_gcc_version
}
# @FUNCTION: ecm_src_prepare
# @DESCRIPTION:
# Wrapper for cmake_src_prepare with lots of extra logic for magic
# handling of linguas, tests, handbook etc.
ecm_src_prepare() {
debug-print-function ${FUNCNAME} "$@"
cmake_src_prepare
# only build examples when required
if ! { in_iuse examples && use examples; } ; then
cmake_comment_add_subdirectory examples
fi
# only enable handbook when required
if in_iuse handbook && ! use handbook ; then
cmake_comment_add_subdirectory ${ECM_HANDBOOK_DIR}
if [[ ${ECM_HANDBOOK} = forceoptional ]] ; then
ecm_punt_bogus_dep KF5 DocTools
sed -i -e "/kdoctools_install/ s/^/#DONT/" CMakeLists.txt || die
fi
fi
# drop translations when nls is not wanted
if in_iuse nls && ! use nls ; then
local po
for po in ${ECM_PO_DIRS}; do
rm -rf ${po} || die
done
fi
# limit playing field of locale stripping to kde-*/ categories
if [[ ${CATEGORY} = kde-* ]] ; then
# always install unconditionally for kconfigwidgets - if you use
# language X as system language, and there is a combobox with language
# names, the translated language name for language Y is taken from
# /usr/share/locale/Y/kf5_entry.desktop
[[ ${PN} != kconfigwidgets ]] && _ecm_strip_handbook_translations
fi
# only build unit tests when required
if ! { in_iuse test && use test; } ; then
if [[ ${ECM_TEST} = forceoptional ]] ; then
ecm_punt_bogus_dep Qt5 Test
# if forceoptional, also cover non-kde categories
cmake_comment_add_subdirectory autotests test tests
elif [[ ${ECM_TEST} = forceoptional-recursive ]] ; then
ecm_punt_bogus_dep Qt5 Test
local f pf="${T}/${P}"-tests-optional.patch
touch ${pf} || die "Failed to touch patch file"
for f in $(find . -type f -name "CMakeLists.txt" -exec \
grep -l "^\s*add_subdirectory\s*\(\s*.*\(auto|unit\)\?tests\?\s*)\s*\)" {} \;); do
cp ${f} ${f}.old || die "Failed to prepare patch origfile"
pushd ${f%/*} > /dev/null || die
ecm_punt_bogus_dep Qt5 Test
sed -i CMakeLists.txt -e \
"/^#/! s/add_subdirectory\s*\(\s*.*\(auto|unit\)\?tests\?\s*)\s*\)/if(BUILD_TESTING)\n&\nendif()/" \
|| die
popd > /dev/null || die
diff -Naur ${f}.old ${f} 1>>${pf}
rm ${f}.old || die "Failed to clean up"
done
eqawarn "Build system was modified by ECM_TEST=forceoptional-recursive."
eqawarn "Unified diff file ready for pickup in:"
eqawarn " ${pf}"
eqawarn "Push it upstream to make this message go away."
elif [[ ${CATEGORY} = kde-frameworks || ${CATEGORY} = kde-plasma || ${CATEGORY} = kde-apps ]] ; then
cmake_comment_add_subdirectory autotests test tests
fi
fi
# in frameworks, tests = manual tests so never build them
if [[ ${CATEGORY} = kde-frameworks ]] && [[ ${PN} != extra-cmake-modules ]]; then
cmake_comment_add_subdirectory tests
fi
}
# @FUNCTION: ecm_src_configure
# @DESCRIPTION:
# Wrapper for cmake_src_configure with extra logic for magic handling of
# handbook, tests etc.
ecm_src_configure() {
debug-print-function ${FUNCNAME} "$@"
if in_iuse debug && ! use debug; then
append-cppflags -DQT_NO_DEBUG
fi
local cmakeargs
if in_iuse test && ! use test ; then
cmakeargs+=( -DBUILD_TESTING=OFF )
if [[ ${ECM_TEST} = optional ]] ; then
cmakeargs+=( -DCMAKE_DISABLE_FIND_PACKAGE_Qt5Test=ON )
fi
fi
if [[ ${ECM_HANDBOOK} = optional ]] ; then
cmakeargs+=( -DCMAKE_DISABLE_FIND_PACKAGE_KF5DocTools=$(usex !handbook) )
fi
if in_iuse designer && [[ ${ECM_DESIGNERPLUGIN} = true ]]; then
cmakeargs+=( -DBUILD_DESIGNERPLUGIN=$(usex designer) )
fi
if [[ ${ECM_QTHELP} = true ]]; then
cmakeargs+=( -DBUILD_QCH=$(usex doc) )
fi
if [[ ${ECM_KDEINSTALLDIRS} = true ]] ; then
cmakeargs+=(
# install mkspecs in the same directory as Qt stuff
-DKDE_INSTALL_USE_QT_SYS_PATHS=ON
# move handbook outside of doc dir, bug 667138
-DKDE_INSTALL_DOCBUNDLEDIR="${EPREFIX}/usr/share/help"
)
fi
# allow the ebuild to override what we set here
mycmakeargs=("${cmakeargs[@]}" "${mycmakeargs[@]}")
cmake_src_configure
}
# @FUNCTION: ecm_src_compile
# @DESCRIPTION:
# Wrapper for cmake_src_compile. Currently doesn't do anything extra, but
# is included as part of the API just in case it's needed in the future.
ecm_src_compile() {
debug-print-function ${FUNCNAME} "$@"
cmake_src_compile "$@"
}
# @FUNCTION: ecm_src_test
# @DESCRIPTION:
# Wrapper for cmake_src_test with extra logic for magic handling of dbus
# and virtualx.
ecm_src_test() {
debug-print-function ${FUNCNAME} "$@"
_test_runner() {
if [[ -n "${VIRTUALDBUS_TEST}" ]]; then
export $(dbus-launch)
fi
cmake_src_test
}
# When run as normal user during ebuild development with the ebuild command,
# tests tend to access the session DBUS. This however is not possible in a
# real emerge or on the tinderbox.
# make sure it does not happen, so bad tests can be recognized and disabled
unset DBUS_SESSION_BUS_ADDRESS DBUS_SESSION_BUS_PID
if [[ ${VIRTUALX_REQUIRED} = always || ${VIRTUALX_REQUIRED} = test ]]; then
virtx _test_runner
else
_test_runner
fi
if [[ -n "${DBUS_SESSION_BUS_PID}" ]] ; then
kill ${DBUS_SESSION_BUS_PID}
fi
}
# @FUNCTION: ecm_src_install
# @DESCRIPTION:
# Wrapper for cmake_src_install. Currently doesn't do anything extra, but
# is included as part of the API just in case it's needed in the future.
ecm_src_install() {
debug-print-function ${FUNCNAME} "$@"
cmake_src_install
}
# @FUNCTION: ecm_pkg_preinst
# @DESCRIPTION:
# Sets up environment variables required in ecm_pkg_postinst.
ecm_pkg_preinst() {
debug-print-function ${FUNCNAME} "$@"
case ${ECM_NONGUI} in
false) xdg_pkg_preinst ;;
*) ;;
esac
}
# @FUNCTION: ecm_pkg_postinst
# @DESCRIPTION:
# Updates the various XDG caches (icon, desktop, mime) if necessary.
ecm_pkg_postinst() {
debug-print-function ${FUNCNAME} "$@"
case ${ECM_NONGUI} in
false) xdg_pkg_postinst ;;
*) ;;
esac
if [[ -n ${_KDE_ORG_ECLASS} ]] && [[ -z ${I_KNOW_WHAT_I_AM_DOING} ]] && [[ ${KDE_BUILD_TYPE} = live ]]; then
einfo "WARNING! This is an experimental live ebuild of ${CATEGORY}/${PN}"
einfo "Use it at your own risk."
einfo "Do _NOT_ file bugs at bugs.gentoo.org because of this ebuild!"
fi
}
# @FUNCTION: ecm_pkg_postrm
# @DESCRIPTION:
# Updates the various XDG caches (icon, desktop, mime) if necessary.
ecm_pkg_postrm() {
debug-print-function ${FUNCNAME} "$@"
case ${ECM_NONGUI} in
false) xdg_pkg_postrm ;;
*) ;;
esac
}
fi

21
eclass/edos2unix.eclass Normal file
View File

@ -0,0 +1,21 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: edos2unix.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @BLURB: convert files from DOS CRLF to UNIX LF line endings
# @FUNCTION: edos2unix
# @USAGE: <file> [more files ...]
# @DESCRIPTION:
# A handy replacement for dos2unix, recode, fixdos, etc... This allows
# you to remove all of these text utilities from DEPEND variables
# because this is a script based solution. Just give it a list of files
# to convert and they will all be changed from the DOS CRLF format to
# the UNIX LF format.
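#
# Example (file names are illustrative):
# @CODE
# edos2unix README.txt src/*.c
# @CODE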
edos2unix() {
[[ $# -eq 0 ]] && return 0
sed -i 's/\r$//' -- "$@" || die
}

503
eclass/elisp-common.eclass Normal file
View File

@ -0,0 +1,503 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: elisp-common.eclass
# @MAINTAINER:
# Gentoo GNU Emacs project <gnu-emacs@gentoo.org>
# @AUTHOR:
# Matthew Kennedy <mkennedy@gentoo.org>
# Jeremy Maitin-Shepard <jbms@attbi.com>
# Mamoru Komachi <usata@gentoo.org>
# Christian Faulhammer <fauli@gentoo.org>
# Ulrich Müller <ulm@gentoo.org>
# @BLURB: Emacs-related installation utilities
# @DESCRIPTION:
#
# Usually you want to use this eclass for (optional) GNU Emacs support
# of your package. This is NOT for XEmacs!
#
# Many of the steps here are sometimes done by the build system of your
# package (especially compilation), so this is mainly for standalone
# elisp files you gathered from somewhere else.
#
# When relying on the emacs USE flag, you need to add
#
# @CODE
# emacs? ( >=app-editors/emacs-23.1:* )
# @CODE
#
# to your DEPEND/RDEPEND line and use the functions provided here to
# bring the files to the correct locations.
#
# If your package requires a minimum Emacs version, e.g. Emacs 26.1,
# then the dependency should be on >=app-editors/emacs-26.1:* instead.
# Because the user can select the Emacs executable with eselect, you
# should also make sure that the active Emacs version is sufficient.
# The eclass will automatically ensure this if you assign variable
# NEED_EMACS with the Emacs version, as in the following example:
#
# @CODE
# NEED_EMACS=26.1
# @CODE
#
# Please note that this should be done only for packages that are known
# to fail with lower Emacs versions.
#
# @SUBSECTION src_compile() usage:
#
# An elisp file is compiled by the elisp-compile() function defined
# here and simply takes the source files as arguments. The case of
# interdependent elisp files is also supported, since the current
# directory is added to the load-path which makes sure that all files
# are loadable.
#
# @CODE
# elisp-compile *.el
# @CODE
#
# Function elisp-make-autoload-file() can be used to generate a file
# with autoload definitions for the lisp functions. It takes the output
# file name (default: "${PN}-autoloads.el") and a list of directories
# (default: working directory) as its arguments. Use of this function
# requires that the elisp source files contain magic ";;;###autoload"
# comments. See the Emacs Lisp Reference Manual (node "Autoload") for
# a detailed explanation.
#
# @SUBSECTION src_install() usage:
#
# The resulting compiled files (.elc) should be put in a subdirectory of
# /usr/share/emacs/site-lisp/ which is named after the first argument
# of elisp-install(). The following parameters are the files to be put
# in that directory. Usually the subdirectory should be ${PN}, you can
# choose something else, but remember to tell elisp-site-file-install()
# (see below) the change, as it defaults to ${PN}.
#
# @CODE
# elisp-install ${PN} *.el *.elc
# @CODE
#
# To let the Emacs support be activated by Emacs on startup, you need
# to provide a site file (shipped in ${FILESDIR}) which contains the
# startup code (have a look in the documentation of your software).
# Normally this would look like this:
#
# @CODE
# (add-to-list 'load-path "@SITELISP@")
# (add-to-list 'auto-mode-alist '("\\.csv\\'" . csv-mode))
# (autoload 'csv-mode "csv-mode" "Major mode for csv files." t)
# @CODE
#
# If your Emacs support files are installed in a subdirectory of
# /usr/share/emacs/site-lisp/ (which is strongly recommended), you need
# to extend Emacs' load-path as shown in the first non-comment line.
# The elisp-site-file-install() function of this eclass will replace
# "@SITELISP@" and "@SITEETC@" by the actual paths.
#
# The next line tells Emacs to load the mode opening a file ending
# with ".csv" and load functions depending on the context and needed
# features. Be careful though. Commands such as "load-library" or "require"
# bloat the editor as they are loaded on every startup. When having
# many Emacs support files, users may be annoyed by the start-up time.
# Also avoid keybindings as they might interfere with the user's
# settings. Give a hint in pkg_postinst(), which should be enough.
# The guiding principle is that emerging your package should not by
# itself cause a change of standard Emacs behaviour.
#
# The naming scheme for this site-init file matches the shell pattern
# "[1-8][0-9]*-gentoo*.el", where the two digits at the beginning define
# the loading order (numbers below 10 or above 89 are reserved for
# internal use). So if your initialisation depends on another Emacs
# package, your site file's number must be higher! If there are no such
# interdependencies then the number should be 50. Otherwise, numbers
# divisible by 10 are preferred.
#
# Best practice is to define a SITEFILE variable in the global scope of
# your ebuild (e.g., right after S or RDEPEND):
#
# @CODE
# SITEFILE="50${PN}-gentoo.el"
# @CODE
#
# Which is then installed by
#
# @CODE
# elisp-site-file-install "${FILESDIR}/${SITEFILE}"
# @CODE
#
# in src_install(). Any characters after the "-gentoo" part and before
# the extension will be stripped from the destination file's name.
# For example, a file "50${PN}-gentoo-${PV}.el" will be installed as
# "50${PN}-gentoo.el". If your subdirectory is not named ${PN}, give
# the differing name as second argument.
#
# @SUBSECTION pkg_setup() usage:
#
# If your ebuild uses the elisp-compile eclass function to compile
# its elisp files (see above), then you don't need a pkg_setup phase,
# because elisp-compile and elisp-make-autoload-file do their own sanity
# checks. On the other hand, if the elisp files are compiled by the
# package's build system, then there is often no check for the Emacs
# version. In this case, you can add an explicit check in pkg_setup:
#
# @CODE
# elisp-check-emacs-version
# @CODE
#
# When having optional Emacs support, you should prepend "use emacs &&"
# to above call of elisp-check-emacs-version().
#
# @SUBSECTION pkg_postinst() / pkg_postrm() usage:
#
# After that you need to recreate the start-up file of Emacs after
# emerging and unmerging by using
#
# @CODE
# pkg_postinst() {
# elisp-site-regen
# }
#
# pkg_postrm() {
# elisp-site-regen
# }
# @CODE
#
# Again, with optional Emacs support, you should prepend "use emacs &&"
# to above calls of elisp-site-regen().
case ${EAPI:-0} in
4|5|6) inherit eapi7-ver ;;
7) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
# @ECLASS-VARIABLE: SITELISP
# @DESCRIPTION:
# Directory where packages install Emacs Lisp files.
SITELISP=/usr/share/emacs/site-lisp
# @ECLASS-VARIABLE: SITEETC
# @DESCRIPTION:
# Directory where packages install miscellaneous (not Lisp) files.
SITEETC=/usr/share/emacs/etc
# @ECLASS-VARIABLE: EMACSMODULES
# @DESCRIPTION:
# Directory where packages install dynamically loaded modules.
# May contain a @libdir@ token which will be replaced by $(get_libdir).
EMACSMODULES=/usr/@libdir@/emacs/modules
# @ECLASS-VARIABLE: EMACS
# @DESCRIPTION:
# Path of Emacs executable.
EMACS=${EPREFIX}/usr/bin/emacs
# @ECLASS-VARIABLE: EMACSFLAGS
# @DESCRIPTION:
# Flags for executing Emacs in batch mode.
# These work for Emacs versions 18-24, so don't change them.
EMACSFLAGS="-batch -q --no-site-file"
# @ECLASS-VARIABLE: BYTECOMPFLAGS
# @DESCRIPTION:
# Emacs flags used for byte-compilation in elisp-compile().
BYTECOMPFLAGS="-L ."
# @ECLASS-VARIABLE: NEED_EMACS
# @DESCRIPTION:
# The minimum Emacs version required for the package.
: ${NEED_EMACS:=23.1}
# @ECLASS-VARIABLE: _ELISP_EMACS_VERSION
# @INTERNAL
# @DESCRIPTION:
# Cached value of Emacs version detected in elisp-check-emacs-version().
_ELISP_EMACS_VERSION=""
# @FUNCTION: elisp-emacs-version
# @RETURN: exit status of Emacs
# @DESCRIPTION:
# Output version of currently active Emacs.
elisp-emacs-version() {
local version ret
# The following will work for at least versions 18-24.
echo "(princ emacs-version)" >"${T}"/emacs-version.el
version=$(
# EMACS could be a microemacs variant that ignores the -batch
# option and would therefore hang, waiting for user interaction.
# Redirecting stdin and unsetting TERM and DISPLAY will cause
# most of them to exit with an error.
unset TERM DISPLAY
${EMACS} ${EMACSFLAGS} -l "${T}"/emacs-version.el </dev/null
)
ret=$?
rm -f "${T}"/emacs-version.el
if [[ ${ret} -ne 0 ]]; then
eerror "elisp-emacs-version: Failed to run ${EMACS}"
return ${ret}
fi
if [[ -z ${version} ]]; then
eerror "elisp-emacs-version: Could not determine Emacs version"
return 1
fi
echo "${version}"
}
# @FUNCTION: elisp-check-emacs-version
# @USAGE: [version]
# @DESCRIPTION:
# Test if the eselected Emacs version is at least the version of
# GNU Emacs specified in the NEED_EMACS variable, or die otherwise.
elisp-check-emacs-version() {
if [[ -z ${_ELISP_EMACS_VERSION} ]]; then
local have_emacs
have_emacs=$(elisp-emacs-version) \
|| die "Could not determine Emacs version"
einfo "Emacs version: ${have_emacs}"
if [[ ${have_emacs} =~ XEmacs|Lucid ]]; then
die "XEmacs detected. This package needs GNU Emacs."
fi
# GNU Emacs versions have only numeric components.
if ! [[ ${have_emacs} =~ ^[0-9]+(\.[0-9]+)*$ ]]; then
die "Malformed version string: ${have_emacs}"
fi
_ELISP_EMACS_VERSION=${have_emacs}
fi
if ! ver_test "${_ELISP_EMACS_VERSION}" -ge "${NEED_EMACS}"; then
eerror "This package needs at least Emacs ${NEED_EMACS}."
eerror "Use \"eselect emacs\" to select the active version."
die "Emacs version too low"
fi
}
# Test if the eselected Emacs version is at least the major version
# of GNU Emacs specified as argument.
# Return 0 if true, 1 if false, 2 if trouble.
# Deprecated, use elisp-check-emacs-version instead.
elisp-need-emacs() {
local need_emacs=$1 have_emacs
have_emacs=$(elisp-emacs-version) || return 2
einfo "Emacs version: ${have_emacs}"
if [[ ${have_emacs} =~ XEmacs|Lucid ]]; then
eerror "This package needs GNU Emacs."
return 1
fi
if ! [[ ${have_emacs%%.*} -ge ${need_emacs%%.*} ]]; then
eerror "This package needs at least Emacs ${need_emacs%%.*}."
eerror "Use \"eselect emacs\" to select the active version."
return 1
fi
return 0
}
# @FUNCTION: elisp-compile
# @USAGE: <list of elisp files>
# @DESCRIPTION:
# Byte-compile Emacs Lisp files.
#
# This function uses GNU Emacs to byte-compile all ".el" specified by
# its arguments. The resulting byte-code (".elc") files are placed in
# the same directory as their corresponding source file.
#
# The current directory is added to the load-path. This will ensure
# that interdependent Emacs Lisp files are visible between themselves,
# in case they require or load one another.
elisp-compile() {
elisp-check-emacs-version
ebegin "Compiling GNU Emacs Elisp files"
${EMACS} ${EMACSFLAGS} ${BYTECOMPFLAGS} -f batch-byte-compile "$@"
eend $? "elisp-compile: batch-byte-compile failed" || die
}
# @FUNCTION: elisp-make-autoload-file
# @USAGE: [output file] [list of directories]
# @DESCRIPTION:
# Generate a file with autoload definitions for the lisp functions.
elisp-make-autoload-file() {
local f="${1:-${PN}-autoloads.el}" null="" page=$'\f'
shift
elisp-check-emacs-version
ebegin "Generating autoload file for GNU Emacs"
cat >"${f}" <<-EOF
;;; ${f##*/} --- autoloads for ${PN}
;;; Commentary:
;; Automatically generated by elisp-common.eclass
;; DO NOT EDIT THIS FILE
;;; Code:
${page}
;; Local ${null}Variables:
;; version-control: never
;; no-byte-compile: t
;; no-update-autoloads: t
;; End:
;;; ${f##*/} ends here
EOF
${EMACS} ${EMACSFLAGS} \
--eval "(setq make-backup-files nil)" \
--eval "(setq generated-autoload-file (expand-file-name \"${f}\"))" \
-f batch-update-autoloads "${@-.}"
eend $? "elisp-make-autoload-file: batch-update-autoloads failed" || die
}
# @FUNCTION: elisp-install
# @USAGE: <subdirectory> <list of files>
# @DESCRIPTION:
# Install files in SITELISP directory.
elisp-install() {
local subdir="$1"
shift
ebegin "Installing Elisp files for GNU Emacs support"
( # subshell to avoid pollution of calling environment
insinto "${SITELISP}/${subdir}"
doins "$@"
)
eend $? "elisp-install: doins failed" || die
}
# @FUNCTION: elisp-modules-install
# @USAGE: <subdirectory> <list of files>
# @DESCRIPTION:
# Install dynamic modules in EMACSMODULES directory.
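#
# Example (a sketch; module file names are illustrative):
# @CODE
# elisp-modules-install ${PN} *.so
# @CODE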
elisp-modules-install() {
local subdir="$1"
shift
# Don't bother inheriting multilib.eclass for get_libdir(), but
# error out in old EAPIs that don't support it natively.
[[ ${EAPI} == [45] ]] \
&& die "${ECLASS}: Dynamic modules not supported in EAPI ${EAPI}"
ebegin "Installing dynamic modules for GNU Emacs support"
( # subshell to avoid pollution of calling environment
exeinto "${EMACSMODULES//@libdir@/$(get_libdir)}/${subdir}"
doexe "$@"
)
eend $? "elisp-modules-install: doins failed" || die
}
# @FUNCTION: elisp-site-file-install
# @USAGE: <site-init file> [subdirectory]
# @DESCRIPTION:
# Install Emacs site-init file in SITELISP directory. Automatically
# inserts a standard comment header with the name of the package
# (unless it is already present). Tokens @SITELISP@, @SITEETC@,
# and @EMACSMODULES@ are replaced by the path to the package's
# subdirectory in SITELISP, SITEETC, and EMACSMODULES, respectively.
elisp-site-file-install() {
local sf="${1##*/}" my_pn="${2:-${PN}}" modules ret
local header=";;; ${PN} site-lisp configuration"
[[ ${sf} == [0-9][0-9]*-gentoo*.el ]] \
|| ewarn "elisp-site-file-install: bad name of site-init file"
[[ ${sf%-gentoo*.el} != "${sf}" ]] && sf="${sf%-gentoo*.el}-gentoo.el"
sf="${T}/${sf}"
ebegin "Installing site initialisation file for GNU Emacs"
[[ $1 = "${sf}" ]] || cp "$1" "${sf}"
if [[ ${EAPI} == [45] ]]; then
grep -q "@EMACSMODULES@" "${sf}" \
&& die "${ECLASS}: Dynamic modules not supported in EAPI ${EAPI}"
else
modules=${EMACSMODULES//@libdir@/$(get_libdir)}
fi
sed -i -e "1{:x;/^\$/{n;bx;};/^;.*${PN}/I!s:^:${header}\n\n:;1s:^:\n:;}" \
-e "s:@SITELISP@:${EPREFIX}${SITELISP}/${my_pn}:g" \
-e "s:@SITEETC@:${EPREFIX}${SITEETC}/${my_pn}:g" \
-e "s:@EMACSMODULES@:${EPREFIX}${modules}/${my_pn}:g;\$q" "${sf}"
( # subshell to avoid pollution of calling environment
insinto "${SITELISP}/site-gentoo.d"
doins "${sf}"
)
ret=$?
rm -f "${sf}"
eend ${ret} "elisp-site-file-install: doins failed" || die
}
# @FUNCTION: elisp-site-regen
# @DESCRIPTION:
# Regenerate the site-gentoo.el file, based on packages' site
# initialisation files in the /usr/share/emacs/site-lisp/site-gentoo.d/
# directory.
elisp-site-regen() {
local sitelisp=${ROOT%/}${EPREFIX}${SITELISP}
local sf i ret=0 null="" page=$'\f'
local -a sflist
if [[ ${EBUILD_PHASE} = *rm && ! -e ${sitelisp}/site-gentoo.el ]]; then
ewarn "Refusing to create site-gentoo.el in ${EBUILD_PHASE} phase."
return 0
fi
[[ -d ${sitelisp} ]] \
|| die "elisp-site-regen: Directory ${sitelisp} does not exist"
[[ -d ${T} ]] \
|| die "elisp-site-regen: Temporary directory ${T} does not exist"
ebegin "Regenerating site-gentoo.el for GNU Emacs (${EBUILD_PHASE})"
for sf in "${sitelisp}"/site-gentoo.d/[0-9][0-9]*.el; do
[[ -r ${sf} ]] && sflist+=("${sf}")
done
cat <<-EOF >"${T}"/site-gentoo.el || ret=$?
;;; site-gentoo.el --- site initialisation for Gentoo-installed packages
;;; Commentary:
;; Automatically generated by elisp-common.eclass
;; DO NOT EDIT THIS FILE
;;; Code:
EOF
# Use sed instead of cat here, since files may miss a trailing newline.
sed '$q' "${sflist[@]}" </dev/null >>"${T}"/site-gentoo.el || ret=$?
cat <<-EOF >>"${T}"/site-gentoo.el || ret=$?
${page}
(provide 'site-gentoo)
;; Local ${null}Variables:
;; no-byte-compile: t
;; buffer-read-only: t
;; End:
;;; site-gentoo.el ends here
EOF
if [[ ${ret} -ne 0 ]]; then
eend ${ret} "elisp-site-regen: Writing site-gentoo.el failed."
die
elif cmp -s "${sitelisp}"/site-gentoo.el "${T}"/site-gentoo.el; then
# This prevents outputting unnecessary text when there
# was actually no change.
# One such case is a remerge, where the output would otherwise be doubled.
rm -f "${T}"/site-gentoo.el
eend 0
einfo "... no changes."
else
mv "${T}"/site-gentoo.el "${sitelisp}"/site-gentoo.el
eend $? "elisp-site-regen: Replacing site-gentoo.el failed" || die
case ${#sflist[@]} in
0) [[ ${PN} = emacs-common-gentoo ]] \
|| ewarn "... Huh? No site initialisation files found." ;;
1) einfo "... ${#sflist[@]} site initialisation file included." ;;
*) einfo "... ${#sflist[@]} site initialisation files included." ;;
esac
fi
return 0
}

202
eclass/elisp.eclass Normal file
View File

@ -0,0 +1,202 @@
# Copyright 2002-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: elisp.eclass
# @MAINTAINER:
# Gentoo GNU Emacs project <gnu-emacs@gentoo.org>
# @AUTHOR:
# Matthew Kennedy <mkennedy@gentoo.org>
# Jeremy Maitin-Shepard <jbms@attbi.com>
# Christian Faulhammer <fauli@gentoo.org>
# Ulrich Müller <ulm@gentoo.org>
# @SUPPORTED_EAPIS: 4 5 6 7
# @BLURB: Eclass for Emacs Lisp packages
# @DESCRIPTION:
#
# This eclass is designed to install elisp files of Emacs related
# packages into the site-lisp directory. The majority of elisp packages
# will only need to define the standard ebuild variables (like SRC_URI)
# and optionally SITEFILE for successful installation.
#
# Emacs support for other than pure elisp packages is handled by
# elisp-common.eclass where you won't have a dependency on Emacs itself.
# All elisp-* functions are documented there.
#
# If the package's source is a single (in whatever way) compressed elisp
# file with the file name ${P}.el, then this eclass will move ${P}.el to
# ${PN}.el in src_unpack().
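# A minimal example ebuild (hypothetical package, shown only as a sketch;
# remaining standard variables are omitted):
# @CODE
# EAPI=7
# NEED_EMACS=25
# inherit elisp
#
# DESCRIPTION="Major mode for editing foo files"
# SRC_URI="https://example.org/${P}.tar.gz"
# SITEFILE="50${PN}-gentoo.el"
# @CODE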
# @ECLASS-VARIABLE: NEED_EMACS
# @DEFAULT_UNSET
# @DESCRIPTION:
# If you need anything different from Emacs 23, use the NEED_EMACS
# variable before inheriting elisp.eclass. Set it to the version your
# package uses and the dependency will be adjusted.
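# For example (the version number is illustrative):
# @CODE
# NEED_EMACS=26
# inherit elisp
# @CODE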
# @ECLASS-VARIABLE: ELISP_PATCHES
# @DEFAULT_UNSET
# @DESCRIPTION:
# Space separated list of patches to apply after unpacking the sources.
# Patch files are searched for in the current working dir, WORKDIR, and
# FILESDIR. This variable is semi-deprecated, preferably use the
# PATCHES array instead if the EAPI supports it.
# @ECLASS-VARIABLE: ELISP_REMOVE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Space separated list of files to remove after unpacking the sources.
# @ECLASS-VARIABLE: SITEFILE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Name of package's site-init file. The filename must match the shell
# pattern "[1-8][0-9]*-gentoo.el"; numbers below 10 and above 89 are
# reserved for internal use. "50${PN}-gentoo.el" is a reasonable choice
# in most cases.
# @ECLASS-VARIABLE: ELISP_TEXINFO
# @DEFAULT_UNSET
# @DESCRIPTION:
# Space separated list of Texinfo sources. Respective GNU Info files
# will be generated in src_compile() and installed in src_install().
inherit elisp-common
case ${EAPI:-0} in
4|5) inherit epatch ;;
6|7) ;;
*) die "${ECLASS}: EAPI ${EAPI:-0} not supported" ;;
esac
EXPORT_FUNCTIONS src_{unpack,prepare,configure,compile,install} \
pkg_{setup,postinst,postrm}
RDEPEND=">=app-editors/emacs-${NEED_EMACS}:*"
case ${EAPI} in
4) RDEPEND="${RDEPEND%:*}"; DEPEND="${RDEPEND}" ;;
5|6) DEPEND="${RDEPEND}" ;;
*) BDEPEND="${RDEPEND}" ;;
esac
# @FUNCTION: elisp_pkg_setup
# @DESCRIPTION:
# Test if the eselected Emacs version is sufficient to fulfil the
# version requirement of the NEED_EMACS variable.
elisp_pkg_setup() {
elisp-check-emacs-version
}
# @FUNCTION: elisp_src_unpack
# @DESCRIPTION:
# Unpack the sources; also handle the case of a single *.el file in
# WORKDIR for packages distributed that way.
elisp_src_unpack() {
default
if [[ -f ${P}.el ]]; then
# the "simple elisp" case with a single *.el file in WORKDIR
mv ${P}.el ${PN}.el || die
[[ -d ${S} ]] || S=${WORKDIR}
fi
}
# @FUNCTION: elisp_src_prepare
# @DESCRIPTION:
# Apply any patches listed in ELISP_PATCHES. Patch files are searched
# for in the current working dir, WORKDIR, and FILESDIR.
elisp_src_prepare() {
local patch file
for patch in ${ELISP_PATCHES}; do
if [[ -f ${patch} ]]; then
file="${patch}"
elif [[ -f ${WORKDIR}/${patch} ]]; then
file="${WORKDIR}/${patch}"
elif [[ -f ${FILESDIR}/${patch} ]]; then
file="${FILESDIR}/${patch}"
else
die "Cannot find ${patch}"
fi
case ${EAPI} in
4|5) epatch "${file}" ;;
*) eapply "${file}" ;;
esac
done
# apply PATCHES (if supported in EAPI), and any user patches
case ${EAPI} in
4|5) epatch_user ;;
*) default ;;
esac
if [[ -n ${ELISP_REMOVE} ]]; then
rm ${ELISP_REMOVE} || die
fi
}
# @FUNCTION: elisp_src_configure
# @DESCRIPTION:
# Do nothing, because Emacs packages seldom bring a full build system.
elisp_src_configure() { :; }
# @FUNCTION: elisp_src_compile
# @DESCRIPTION:
# Call elisp-compile to byte-compile all Emacs Lisp (*.el) files.
# If ELISP_TEXINFO lists any Texinfo sources, call makeinfo to generate
# GNU Info files from them.
elisp_src_compile() {
elisp-compile *.el
if [[ -n ${ELISP_TEXINFO} ]]; then
makeinfo ${ELISP_TEXINFO} || die
fi
}
# @FUNCTION: elisp_src_install
# @DESCRIPTION:
# Call elisp-install to install all Emacs Lisp (*.el and *.elc) files.
# If the SITEFILE variable specifies a site-init file, install it with
# elisp-site-file-install. Also install any GNU Info files listed in
# ELISP_TEXINFO and documentation listed in the DOCS variable.
elisp_src_install() {
elisp-install ${PN} *.el *.elc
if [[ -n ${SITEFILE} ]]; then
elisp-site-file-install "${FILESDIR}/${SITEFILE}"
fi
if [[ -n ${ELISP_TEXINFO} ]]; then
set -- ${ELISP_TEXINFO}
set -- ${@##*/}
doinfo ${@/%.*/.info*}
fi
# install documentation only when explicitly requested
case ${EAPI} in
4|5) [[ -n ${DOCS} ]] && dodoc ${DOCS} ;;
*) [[ $(declare -p DOCS 2>/dev/null) == *=* ]] && einstalldocs ;;
esac
if declare -f readme.gentoo_create_doc >/dev/null; then
readme.gentoo_create_doc
fi
}
# @FUNCTION: elisp_pkg_postinst
# @DESCRIPTION:
# Call elisp-site-regen, in order to collect the site initialisation for
# all installed Emacs Lisp packages in the site-gentoo.el file.
elisp_pkg_postinst() {
elisp-site-regen
if declare -f readme.gentoo_print_elog >/dev/null; then
readme.gentoo_print_elog
fi
}
# @FUNCTION: elisp_pkg_postrm
# @DESCRIPTION:
# Call elisp-site-regen, in order to collect the site initialisation for
# all installed Emacs Lisp packages in the site-gentoo.el file.
elisp_pkg_postrm() {
elisp-site-regen
}

163
eclass/emboss-r2.eclass Normal file
View File

@ -0,0 +1,163 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: emboss-r2.eclass
# @MAINTAINER:
# sci-biology@gentoo.org
# ted.tanberry@gmail.com
# @AUTHOR:
# Original author: Olivier Fisette <ofisette@gmail.com>
# Next gen author: Justin Lecher <jlec@gentoo.org>
# Next gen author: Ted Tanberry <ted.tanberry@gmail.com>
# @SUPPORTED_EAPIS: 6
# @BLURB: Use this to easily install EMBOSS and EMBASSY programs (EMBOSS add-ons).
# @DESCRIPTION:
# The inheriting ebuild must set at least EAPI=6 and provide EBO_DESCRIPTION before the inherit line.
# KEYWORDS should be set. Additionally "(R|P)DEPEND"encies and other standard
# ebuild variables can be extended (FOO+=" bar").
#
# Example:
#
# EAPI=6
#
# EBO_DESCRIPTION="applications from the CBS group"
#
# inherit emboss-r2
# @ECLASS-VARIABLE: EBO_DESCRIPTION
# @DEFAULT_UNSET
# @DESCRIPTION:
# Should be set. Completes the generic description of the embassy module as follows:
#
# EMBOSS integrated version of ${EBO_DESCRIPTION}, e.g.
#
# "EMBOSS integrated version of applications from the CBS group"
#
# Defaults to the upstream name of the module.
if [[ ! ${_EMBOSS_R2} ]]; then
case ${EAPI:-0} in
6) ;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
inherit autotools flag-o-matic
EXPORT_FUNCTIONS src_prepare src_configure src_install
HOMEPAGE="http://emboss.sourceforge.net/"
LICENSE="LGPL-2 GPL-2"
SLOT="0"
IUSE="mysql pdf png postgres static-libs X"
RDEPEND="
dev-libs/expat
dev-libs/libpcre:3
sci-libs/plplot:=
sys-libs/zlib
mysql? ( dev-db/mysql-connector-c:0= )
pdf? ( media-libs/libharu:= )
png? ( media-libs/gd:2=[png] )
postgres? ( dev-db/postgresql:= )
X? ( x11-libs/libXt )"
if [[ ${PN} == embassy-* ]]; then
EMBASSY_PACKAGE=yes
# The EMBASSY package name, retrieved from the inheriting ebuild's name
EN=${PN:8}
# The full name and version of the EMBASSY package (excluding the Gentoo
# revision number)
EF="${EN^^}-${PV}"
[[ ${EBO_DESCRIPTION} ]] || die "EBO_DESCRIPTION was not set before inheriting emboss-r2.eclass"
DESCRIPTION="EMBOSS integrated version of ${EBO_DESCRIPTION}"
SRC_URI="ftp://emboss.open-bio.org/pub/EMBOSS/${EF}.tar.gz -> embassy-${EN}-${PV}.tar.gz"
RDEPEND+=" >=sci-biology/emboss-6.6.0-r1[mysql=,pdf=,png=,postgres=,static-libs=,X=]"
S="${WORKDIR}/${EF}"
fi
DEPEND="${RDEPEND}"
# @ECLASS-VARIABLE: EBO_EAUTORECONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# If set, run eautoreconf from autotools.eclass after applying patches
# in emboss-r2_src_prepare.
# @FUNCTION: emboss-r2_src_prepare
# @DESCRIPTION:
# Does the following things
#
# 1. Renames configure.in to configure.ac, if possible
# 2. Calls default_src_prepare (i.e.
# applies Gentoo and user patches in EAPI>=6)
# 3. If EBO_EAUTORECONF is set, run eautoreconf
#
emboss-r2_src_prepare() {
if [[ -e configure.in ]]; then
mv configure.{in,ac} || die
fi
default
[[ ${EBO_EAUTORECONF} ]] && eautoreconf
}
# @FUNCTION: emboss-r2_src_configure
# @DESCRIPTION:
# runs econf with following options.
#
# --enable-shared
# $(use_enable static-libs static)
# $(use_with X x)
# $(use_with png pngdriver)
# $(use_with pdf hpdf)
# $(use_with mysql mysql)
# $(use_with postgres postgresql)
# --enable-large
# --without-java
# --enable-systemlibs
#
# can be appended to like econf, e.g.
# emboss-r2_src_configure --disable-shared
emboss-r2_src_configure() {
local myconf=(
--enable-shared
$(use_enable static-libs static)
$(use_with X x)
$(use_with png pngdriver "${EPREFIX}/usr")
$(use_with pdf hpdf "${EPREFIX}/usr")
$(use_with mysql mysql "${EPREFIX}/usr/bin/mysql_config")
$(use_with postgres postgresql "${EPREFIX}/usr/bin/pg_config")
--enable-large
--without-java
--enable-systemlibs
)
[[ ${EMBASSY_PACKAGE} == yes ]] && \
append-cppflags "-I${EPREFIX}/usr/include/emboss"
econf "${myconf[@]}" "$@"
}
# @FUNCTION: emboss-r2_src_install
# @DESCRIPTION:
# Installs the package into the staging area and removes
# extraneous .la files, if USE="-static-libs"
emboss-r2_src_install() {
default
# delete .la files
if ! use static-libs; then
find "${D}" -name '*.la' -delete || die
fi
}
_EMBOSS_R2=1
fi

469
eclass/epatch.eclass Normal file
View File

@ -0,0 +1,469 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: epatch.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6
# @BLURB: easy patch application functions
# @DEPRECATED: eapply from EAPI 7
# @DESCRIPTION:
# An eclass providing epatch and epatch_user functions to easily apply
# patches to ebuilds. Mostly superseded by eapply* in EAPI 6.
if [[ -z ${_EPATCH_ECLASS} ]]; then
case ${EAPI:-0} in
0|1|2|3|4|5|6)
;;
*)
die "${ECLASS}: banned in EAPI=${EAPI}; use eapply* instead";;
esac
inherit estack
# @VARIABLE: EPATCH_SOURCE
# @DESCRIPTION:
# Default directory to search for patches.
EPATCH_SOURCE="${WORKDIR}/patch"
# @VARIABLE: EPATCH_SUFFIX
# @DESCRIPTION:
# Default extension for patches (do not prefix the period yourself).
EPATCH_SUFFIX="patch.bz2"
# @VARIABLE: EPATCH_OPTS
# @DESCRIPTION:
# Options to pass to patch. Meant for ebuild/package-specific tweaking
# such as forcing the patch level (-p#) or fuzz (-F#) factor. Note that
# for single patch tweaking, you can also pass flags directly to epatch.
EPATCH_OPTS=""
# @VARIABLE: EPATCH_COMMON_OPTS
# @DESCRIPTION:
# Common options to pass to `patch`. You probably should never need to
# change these. If you do, please discuss it with base-system first to
# be sure.
# @CODE
# -g0 - keep RCS, ClearCase, Perforce and SCCS happy #24571
# --no-backup-if-mismatch - do not leave .orig files behind
# -E - automatically remove empty files
# @CODE
EPATCH_COMMON_OPTS="-g0 -E --no-backup-if-mismatch"
# @VARIABLE: EPATCH_EXCLUDE
# @DESCRIPTION:
# List of patches not to apply. Note this is only file names,
# and not the full path. Globs accepted.
EPATCH_EXCLUDE=""
# @VARIABLE: EPATCH_SINGLE_MSG
# @DESCRIPTION:
# Change the printed message for a single patch.
EPATCH_SINGLE_MSG=""
# @VARIABLE: EPATCH_MULTI_MSG
# @DESCRIPTION:
# Change the printed message for multiple patches.
EPATCH_MULTI_MSG="Applying various patches (bugfixes/updates) ..."
# @VARIABLE: EPATCH_FORCE
# @DESCRIPTION:
# Only require patches to match EPATCH_SUFFIX rather than the extended
# arch naming style.
EPATCH_FORCE="no"
# @VARIABLE: EPATCH_USER_EXCLUDE
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of patches not to apply. Note this is only file names,
# and not the full path. Globs accepted.
# @FUNCTION: epatch
# @USAGE: [options] [patches] [dirs of patches]
# @DESCRIPTION:
# epatch is designed to greatly simplify the application of patches. It can
# process patch files directly, or directories of patches. The patches may be
# compressed (bzip/gzip/etc...) or plain text. You generally need not specify
# the -p option as epatch will automatically attempt -p0 to -p4 until things
# apply successfully.
#
# If you do not specify any patches/dirs, then epatch will default to the
# directory specified by EPATCH_SOURCE.
#
# Any options specified that start with a dash will be passed down to patch
# for this specific invocation. As soon as an arg w/out a dash is found, then
# arg processing stops.
#
# When processing directories, epatch will apply all patches that match:
# @CODE
# if ${EPATCH_FORCE} != "yes"
# ??_${ARCH}_foo.${EPATCH_SUFFIX}
# else
# *.${EPATCH_SUFFIX}
# @CODE
# The leading ?? are typically numbers used to force consistent patch ordering.
# The arch field is used to apply patches only for the host architecture;
# the special value "all" means the patch applies to everyone. Note that using values
# other than "all" is highly discouraged -- you should apply patches all the
# time and let architecture details be detected at configure/compile time.
#
# If EPATCH_SUFFIX is empty, then no period before it is implied when searching
# for patches to apply.
#
# Refer to the other EPATCH_xxx variables for more customization of behavior.
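# A sketch of typical calls (the patch names are illustrative):
# @CODE
# epatch "${FILESDIR}"/${P}-fix-build.patch
# epatch "${FILESDIR}"/${PN}-respect-cflags.patch
# @CODE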
epatch() {
_epatch_draw_line() {
# create a line of same length as input string
[[ -z $1 ]] && set "$(printf "%65s" '')"
echo "${1//?/=}"
}
unset P4CONFIG P4PORT P4USER # keep perforce at bay #56402
# First process options. We localize the EPATCH_OPTS setting
# from above so that we can pass it on in the loop below with
# any additional values the user has specified.
local EPATCH_OPTS=( ${EPATCH_OPTS[*]} )
while [[ $# -gt 0 ]] ; do
case $1 in
-*) EPATCH_OPTS+=( "$1" ) ;;
*) break ;;
esac
shift
done
# Let the rest of the code process one user arg at a time --
# each arg may expand into multiple patches, and each arg may
# need to start off with the default global EPATCH_xxx values
if [[ $# -gt 1 ]] ; then
local m
for m in "$@" ; do
epatch "${m}"
done
return 0
fi
local SINGLE_PATCH="no"
# no args means process ${EPATCH_SOURCE}
[[ $# -eq 0 ]] && set -- "${EPATCH_SOURCE}"
if [[ -f $1 ]] ; then
SINGLE_PATCH="yes"
set -- "$1"
# Use the suffix from the single patch (localize it); the code
# below will find the suffix for us
local EPATCH_SUFFIX=$1
elif [[ -d $1 ]] ; then
# We have to force sorting to C so that the wildcard expansion is consistent #471666.
evar_push_set LC_COLLATE C
# Some people like to make dirs of patches w/out suffixes (vim).
set -- "$1"/*${EPATCH_SUFFIX:+."${EPATCH_SUFFIX}"}
evar_pop
elif [[ -f ${EPATCH_SOURCE}/$1 ]] ; then
# Re-use EPATCH_SOURCE as a search dir
epatch "${EPATCH_SOURCE}/$1"
return $?
else
# sanity check ... if it isn't a dir or file, wtf man ?
[[ $# -ne 0 ]] && EPATCH_SOURCE=$1
echo
eerror "Cannot find \$EPATCH_SOURCE! Value for \$EPATCH_SOURCE is:"
eerror
eerror " ${EPATCH_SOURCE}"
eerror " ( ${EPATCH_SOURCE##*/} )"
echo
die "Cannot find \$EPATCH_SOURCE!"
fi
# Now that we know we're actually going to apply something, merge
# all of the patch options back in to a single variable for below.
EPATCH_OPTS="${EPATCH_COMMON_OPTS} ${EPATCH_OPTS[*]}"
local PIPE_CMD
case ${EPATCH_SUFFIX##*\.} in
xz) PIPE_CMD="xz -dc" ;;
lzma) PIPE_CMD="lzma -dc" ;;
bz2) PIPE_CMD="bzip2 -dc" ;;
gz|Z|z) PIPE_CMD="gzip -dc" ;;
ZIP|zip) PIPE_CMD="unzip -p" ;;
*) ;;
esac
[[ ${SINGLE_PATCH} == "no" ]] && einfo "${EPATCH_MULTI_MSG}"
local x
for x in "$@" ; do
# If the patch dir given contains subdirs, or our EPATCH_SUFFIX
# didn't match anything, just continue on
[[ ! -f ${x} ]] && continue
local patchname=${x##*/}
# Apply single patches, or forced sets of patches, or
# patches with ARCH dependent names.
# ???_arch_foo.patch
# Else, skip this input altogether
local a=${patchname#*_} # strip the ???_
a=${a%%_*} # strip the _foo.patch
if ! [[ ${SINGLE_PATCH} == "yes" || \
${EPATCH_FORCE} == "yes" || \
${a} == all || \
${a} == ${ARCH} ]]
then
continue
fi
# Let people filter things dynamically
if [[ -n ${EPATCH_EXCLUDE}${EPATCH_USER_EXCLUDE} ]] ; then
# let people use globs in the exclude
eshopts_push -o noglob
local ex
for ex in ${EPATCH_EXCLUDE} ; do
if [[ ${patchname} == ${ex} ]] ; then
einfo " Skipping ${patchname} due to EPATCH_EXCLUDE ..."
eshopts_pop
continue 2
fi
done
for ex in ${EPATCH_USER_EXCLUDE} ; do
if [[ ${patchname} == ${ex} ]] ; then
einfo " Skipping ${patchname} due to EPATCH_USER_EXCLUDE ..."
eshopts_pop
continue 2
fi
done
eshopts_pop
fi
if [[ ${SINGLE_PATCH} == "yes" ]] ; then
if [[ -n ${EPATCH_SINGLE_MSG} ]] ; then
einfo "${EPATCH_SINGLE_MSG}"
else
einfo "Applying ${patchname} ..."
fi
else
einfo " ${patchname} ..."
fi
# Handle aliased patch command #404447 #461568
local patch="patch"
eval $(alias patch 2>/dev/null | sed 's:^alias ::')
# most of the time, there will only be one run per unique name,
# but if there are more, make sure we get unique log filenames
local STDERR_TARGET="${T}/${patchname}.out"
if [[ -e ${STDERR_TARGET} ]] ; then
STDERR_TARGET="${T}/${patchname}-$$.out"
fi
printf "***** %s *****\nPWD: %s\nPATCH TOOL: %s -> %s\nVERSION INFO:\n%s\n\n" \
"${patchname}" \
"${PWD}" \
"${patch}" \
"$(type -P "${patch}")" \
"$(${patch} --version)" \
> "${STDERR_TARGET}"
# Decompress the patch if need be
local count=0
local PATCH_TARGET
if [[ -n ${PIPE_CMD} ]] ; then
PATCH_TARGET="${T}/$$.patch"
echo "PIPE_COMMAND: ${PIPE_CMD} ${x} > ${PATCH_TARGET}" >> "${STDERR_TARGET}"
if ! (${PIPE_CMD} "${x}" > "${PATCH_TARGET}") >> "${STDERR_TARGET}" 2>&1 ; then
echo
eerror "Could not extract patch!"
#die "Could not extract patch!"
count=5
break
fi
else
PATCH_TARGET=${x}
fi
# Check for absolute paths in patches. If sandbox is disabled,
# people could (accidentally) patch files in the root filesystem.
# Or trigger other unpleasantries #237667. So disallow -p0 on
# such patches.
local abs_paths=$(egrep -n '^[-+]{3} /' "${PATCH_TARGET}" | awk '$2 != "/dev/null" { print }')
if [[ -n ${abs_paths} ]] ; then
count=1
printf "NOTE: skipping -p0 due to absolute paths in patch:\n%s\n" "${abs_paths}" >> "${STDERR_TARGET}"
fi
# Similar reason, but with relative paths.
local rel_paths=$(egrep -n '^[-+]{3} [^ ]*[.][.]/' "${PATCH_TARGET}")
if [[ -n ${rel_paths} ]] ; then
echo
eerror "Rejected Patch: ${patchname} !"
eerror " ( ${PATCH_TARGET} )"
eerror
eerror "Your patch uses relative paths '../':"
eerror "${rel_paths}"
echo
die "you need to fix the relative paths in patch"
fi
# Dynamically detect the correct -p# ... i'm lazy, so shoot me :/
local patch_cmd
while [[ ${count} -lt 5 ]] ; do
patch_cmd="${patch} -p${count} ${EPATCH_OPTS}"
# Generate some useful debug info ...
(
_epatch_draw_line "***** ${patchname} *****"
echo
echo "PATCH COMMAND: ${patch_cmd} --dry-run -f < '${PATCH_TARGET}'"
echo
_epatch_draw_line "***** ${patchname} *****"
${patch_cmd} --dry-run -f < "${PATCH_TARGET}" 2>&1
ret=$?
echo
echo "patch program exited with status ${ret}"
exit ${ret}
) >> "${STDERR_TARGET}"
if [ $? -eq 0 ] ; then
(
_epatch_draw_line "***** ${patchname} *****"
echo
echo "ACTUALLY APPLYING ${patchname} ..."
echo "PATCH COMMAND: ${patch_cmd} < '${PATCH_TARGET}'"
echo
_epatch_draw_line "***** ${patchname} *****"
${patch_cmd} < "${PATCH_TARGET}" 2>&1
ret=$?
echo
echo "patch program exited with status ${ret}"
exit ${ret}
) >> "${STDERR_TARGET}"
if [ $? -ne 0 ] ; then
echo
eerror "A dry-run of patch command succeeded, but actually"
eerror "applying the patch failed!"
#die "Real world sux compared to the dreamworld!"
count=5
fi
break
fi
: $(( count++ ))
done
(( EPATCH_N_APPLIED_PATCHES++ ))
# if we had to decompress the patch, delete the temp one
if [[ -n ${PIPE_CMD} ]] ; then
rm -f "${PATCH_TARGET}"
fi
if [[ ${count} -ge 5 ]] ; then
echo
eerror "Failed Patch: ${patchname} !"
eerror " ( ${PATCH_TARGET} )"
eerror
eerror "Include in your bugreport the contents of:"
eerror
eerror " ${STDERR_TARGET}"
echo
die "Failed Patch: ${patchname}!"
fi
# if everything worked, delete the full debug patch log
rm -f "${STDERR_TARGET}"
# then log away the exact stuff for people to review later
cat <<-EOF >> "${T}/epatch.log"
PATCH: ${x}
CMD: ${patch_cmd}
PWD: ${PWD}
EOF
eend 0
done
[[ ${SINGLE_PATCH} == "no" ]] && einfo "Done with patching"
: # everything worked
}
case ${EAPI:-0} in
0|1|2|3|4|5)
# @VARIABLE: EPATCH_USER_SOURCE
# @DESCRIPTION:
# Location for user patches, see the epatch_user function.
# Should be set by the user. Don't set this in ebuilds.
: ${EPATCH_USER_SOURCE:=${PORTAGE_CONFIGROOT%/}/etc/portage/patches}
# @FUNCTION: epatch_user
# @USAGE:
# @DESCRIPTION:
# Applies user-provided patches to the source tree. The patches are
# taken from /etc/portage/patches/<CATEGORY>/<P-PR|P|PN>[:SLOT]/, where the first
# of these three directories to exist will be the one to use, ignoring
# any more general directories which might exist as well. They must end
# in ".patch" to be applied.
#
# User patches are intended for quick testing of patches without ebuild
# modifications, as well as for permanent customizations a user might
# desire. Obviously, there can be no official support for arbitrarily
# patched ebuilds. So whenever a build log in a bug report mentions that
# user patches were applied, the user should be asked to reproduce the
# problem without these.
#
# Not all ebuilds do call this function, so placing patches in the
# stated directory might or might not work, depending on the package and
# the eclasses it inherits and uses. It is safe to call the function
# repeatedly, so it is always possible to add a call at the ebuild
# level. The first call is the time when the patches will be
# applied.
#
# Ideally, this function should be called after gentoo-specific patches
# have been applied, so that their code can be modified as well, but
# before calls to e.g. eautoreconf, as the user patches might affect
# autotool input files as well.
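# For example, a user patch for a hypothetical package could be placed at:
# @CODE
# /etc/portage/patches/app-misc/foo/0001-fix-build.patch
# @CODE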
epatch_user() {
[[ $# -ne 0 ]] && die "epatch_user takes no options"
# Allow multiple calls to this function; ignore all but the first
local applied="${T}/epatch_user.log"
[[ -e ${applied} ]] && return 2
# don't clobber any EPATCH vars that the parent might want
local EPATCH_SOURCE check
for check in ${CATEGORY}/{${P}-${PR},${P},${PN}}{,:${SLOT%/*}}; do
EPATCH_SOURCE=${EPATCH_USER_SOURCE}/${CTARGET}/${check}
[[ -r ${EPATCH_SOURCE} ]] || EPATCH_SOURCE=${EPATCH_USER_SOURCE}/${CHOST}/${check}
[[ -r ${EPATCH_SOURCE} ]] || EPATCH_SOURCE=${EPATCH_USER_SOURCE}/${check}
if [[ -d ${EPATCH_SOURCE} ]] ; then
local old_n_applied_patches=${EPATCH_N_APPLIED_PATCHES:-0}
EPATCH_SOURCE=${EPATCH_SOURCE} \
EPATCH_SUFFIX="patch" \
EPATCH_FORCE="yes" \
EPATCH_MULTI_MSG="Applying user patches from ${EPATCH_SOURCE} ..." \
epatch
echo "${EPATCH_SOURCE}" > "${applied}"
if [[ ${old_n_applied_patches} -lt ${EPATCH_N_APPLIED_PATCHES} ]]; then
has epatch_user_death_notice ${EBUILD_DEATH_HOOKS} || \
EBUILD_DEATH_HOOKS+=" epatch_user_death_notice"
fi
return 0
fi
done
echo "none" > "${applied}"
return 1
}
# @FUNCTION: epatch_user_death_notice
# @INTERNAL
# @DESCRIPTION:
# Include an explicit notice in the die message itself that user patches were
# applied to this build.
epatch_user_death_notice() {
ewarn "!!! User patches were applied to this build!"
}
esac
_EPATCH_ECLASS=1
fi #_EPATCH_ECLASS

202
eclass/estack.eclass Normal file
View File

@ -0,0 +1,202 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: estack.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @BLURB: stack-like value storage support
# @DESCRIPTION:
# Support for storing values on stack-like variables.
if [[ -z ${_ESTACK_ECLASS} ]]; then
# @FUNCTION: estack_push
# @USAGE: <stack> [items to push]
# @DESCRIPTION:
# Push any number of items onto the specified stack. Pick a name that
# is a valid variable (i.e. stick to alphanumerics), and push as many
# items as you like onto the stack at once.
#
# The following code snippet will echo 5, then 4, then 3, then ...
# @CODE
# estack_push mystack 1 2 3 4 5
# while estack_pop mystack i ; do
# echo "${i}"
# done
# @CODE
estack_push() {
[[ $# -eq 0 ]] && die "estack_push: incorrect # of arguments"
local stack_name="_ESTACK_$1_" ; shift
eval ${stack_name}+=\( \"\$@\" \)
}
# @FUNCTION: estack_pop
# @USAGE: <stack> [variable]
# @DESCRIPTION:
# Pop a single item off the specified stack. If a variable is specified,
# the popped item is stored there. If no more items are available, return
# 1, else return 0. See estack_push for more info.
estack_pop() {
[[ $# -eq 0 || $# -gt 2 ]] && die "estack_pop: incorrect # of arguments"
# We use the fugly _estack_xxx var names to avoid collision with
# passing back the return value. If we used "local i" and the
# caller ran `estack_pop ... i`, we'd end up setting the local
# copy of "i" rather than the caller's copy. The _estack_xxx
# garbage is preferable to using $1/$2 everywhere as that is a
# bit harder to read.
local _estack_name="_ESTACK_$1_" ; shift
local _estack_retvar=$1 ; shift
eval local _estack_i=\${#${_estack_name}\[@\]}
# Don't warn -- let the caller interpret this as a failure
# or as normal behavior (akin to `shift`)
[[ $(( --_estack_i )) -eq -1 ]] && return 1
if [[ -n ${_estack_retvar} ]] ; then
eval ${_estack_retvar}=\"\${${_estack_name}\[${_estack_i}\]}\"
fi
eval unset \"${_estack_name}\[${_estack_i}\]\"
}
# @FUNCTION: evar_push
# @USAGE: <variable to save> [more vars to save]
# @DESCRIPTION:
# This lets you temporarily modify a variable and then restore it (including
# set vs unset semantics). Arrays are not supported at this time.
#
# This is meant for variables where using `local` does not work (such as
# exported variables, or only temporarily changing things in a func).
#
# For example:
# @CODE
# evar_push LC_ALL
# export LC_ALL=C
# ... do some stuff that needs LC_ALL=C set ...
# evar_pop
#
# # You can also save/restore more than one var at a time
# evar_push BUTTERFLY IN THE SKY
# ... do stuff with the vars ...
# evar_pop # This restores just one var, SKY
# ... do more stuff ...
# evar_pop 3 # This pops the remaining 3 vars
# @CODE
evar_push() {
local var val
for var ; do
[[ ${!var+set} == "set" ]] \
&& val=${!var} \
|| val="unset_76fc3c462065bb4ca959f939e6793f94"
estack_push evar "${var}" "${val}"
done
}
# @FUNCTION: evar_push_set
# @USAGE: <variable to save> [new value to store]
# @DESCRIPTION:
# This is a handy shortcut to save and temporarily set a variable. If a value
# is not specified, the var will be unset.
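# For example (the same pattern as the evar_push example above):
# @CODE
# evar_push_set LC_ALL C
# ... do locale-sensitive work ...
# evar_pop
# @CODE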
evar_push_set() {
local var=$1
evar_push ${var}
case $# in
1) unset ${var} ;;
2) printf -v "${var}" '%s' "$2" ;;
*) die "${FUNCNAME}: incorrect # of args: $*" ;;
esac
}
# @FUNCTION: evar_pop
# @USAGE: [number of vars to restore]
# @DESCRIPTION:
# Restore the variables to the state saved with the corresponding
# evar_push call. See that function for more details.
evar_pop() {
local cnt=${1:-bad}
case $# in
0) cnt=1 ;;
1) [[ -z ${cnt//[0-9]} ]] \
|| die "${FUNCNAME}: first arg must be a number: $*" ;;
*) die "${FUNCNAME}: only accepts one arg: $*" ;;
esac
local var val
while (( cnt-- )) ; do
estack_pop evar val || die "${FUNCNAME}: unbalanced push"
estack_pop evar var || die "${FUNCNAME}: unbalanced push"
[[ ${val} == "unset_76fc3c462065bb4ca959f939e6793f94" ]] \
&& unset ${var} \
|| printf -v "${var}" '%s' "${val}"
done
}
# @FUNCTION: eshopts_push
# @USAGE: [options to `set` or `shopt`]
# @DESCRIPTION:
# Often, code will want to enable a shell option to change its behavior.
# Since changing shell options can easily break other pieces of code (which
# assume the default state), eshopts_push is used to (1) push the current shell
# options onto a stack and (2) pass the specified arguments to set.
#
# If the first argument is '-s' or '-u', we assume you want to call `shopt`
# rather than `set` as there are some options only available via that.
#
# A common example is to disable shell globbing so that special meaning/care
# may be used with variables/arguments to custom functions. That would be:
# @CODE
# eshopts_push -o noglob
# for x in ${foo} ; do
# if ...some check... ; then
# eshopts_pop
# return 0
# fi
# done
# eshopts_pop
# @CODE
eshopts_push() {
# Save both "shopt" and "set -o" option sets, because otherwise
# restoring posix would disable expand_aliases by side effect. #662586
estack_push eshopts "$(shopt -p -o) $(shopt -p)"
if [[ $1 == -[su] ]] ; then
[[ $# -le 1 ]] && return 0
shopt "$@" || die "${FUNCNAME}: bad options to shopt: $*"
else
[[ $# -eq 0 ]] && return 0
set "$@" || die "${FUNCNAME}: bad options to set: $*"
fi
}
# @FUNCTION: eshopts_pop
# @USAGE:
# @DESCRIPTION:
# Restore the shell options to the state saved with the corresponding
# eshopts_push call. See that function for more details.
eshopts_pop() {
local s
estack_pop eshopts s || die "${FUNCNAME}: unbalanced push"
eval "${s}" || die "${FUNCNAME}: sanity: invalid shopt options: ${s}"
}
# @FUNCTION: eumask_push
# @USAGE: <new umask>
# @DESCRIPTION:
# Set the umask to the new value specified while saving the previous
# value onto a stack. Useful for temporarily changing the umask.
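# A short usage sketch (the umask value is illustrative):
# @CODE
# eumask_push 022
# ... create files that must not be group-writable ...
# eumask_pop
# @CODE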
eumask_push() {
estack_push eumask "$(umask)"
umask "$@" || die "${FUNCNAME}: bad options to umask: $*"
}
# @FUNCTION: eumask_pop
# @USAGE:
# @DESCRIPTION:
# Restore the previous umask state.
eumask_pop() {
[[ $# -eq 0 ]] || die "${FUNCNAME}: we take no options"
local s
estack_pop eumask s || die "${FUNCNAME}: unbalanced push"
umask ${s} || die "${FUNCNAME}: sanity: could not restore umask: ${s}"
}
_ESTACK_ECLASS=1
fi #_ESTACK_ECLASS

214
eclass/eutils.eclass Normal file
View File

@ -0,0 +1,214 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: eutils.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6 7
# @BLURB: many extra (but common) functions that are used in ebuilds
# @DESCRIPTION:
# The eutils eclass contains a suite of functions that complement
# the ones that ebuild.sh already contains. The idea is that the functions
# are not required in all ebuilds but enough utilize them to have a common
# home rather than having multiple ebuilds implementing the same thing.
#
# Due to the nature of this eclass, some functions may have maintainers
# different from the overall eclass!
#
# This eclass is DEPRECATED and must not be inherited by any new ebuilds
# or eclasses. Use the more specific split eclasses instead, or native
# package manager functions when available.
if [[ -z ${_EUTILS_ECLASS} ]]; then
_EUTILS_ECLASS=1
# implicitly inherited (now split) eclasses
case ${EAPI:-0} in
0|1|2|3|4|5|6)
inherit desktop edos2unix epatch estack l10n ltprune multilib \
preserve-libs toolchain-funcs vcs-clean wrapper
;;
7) inherit edos2unix l10n wrapper ;;
*) die "${ECLASS} is banned in EAPI ${EAPI}" ;;
esac
# @FUNCTION: emktemp
# @USAGE: [temp dir]
# @DESCRIPTION:
# Cheap replacement for when coreutils (and thus mktemp) does not exist
# on the user's system.
emktemp() {
eqawarn "emktemp is deprecated. Create a temporary file in \${T} instead."
local exe="touch"
[[ $1 == -d ]] && exe="mkdir" && shift
local topdir=$1
if [[ -z ${topdir} ]] ; then
[[ -z ${T} ]] \
&& topdir="/tmp" \
|| topdir=${T}
fi
if ! type -P mktemp > /dev/null ; then
# system lacks `mktemp` so we have to fake it
local tmp=/
while [[ -e ${tmp} ]] ; do
tmp=${topdir}/tmp.${RANDOM}.${RANDOM}.${RANDOM}
done
${exe} "${tmp}" || ${exe} -p "${tmp}"
echo "${tmp}"
else
# the args here will give slightly weird names on BSD,
# but should produce a usable file on all userlands
if [[ ${exe} == "touch" ]] ; then
TMPDIR="${topdir}" mktemp -t tmp.XXXXXXXXXX
else
TMPDIR="${topdir}" mktemp -dt tmp.XXXXXXXXXX
fi
fi
}
path_exists() {
eerror "path_exists has been removed. Please see the following post"
eerror "for a replacement snippet:"
eerror "https://blogs.gentoo.org/mgorny/2018/08/09/inlining-path_exists/"
die "path_exists is banned"
}
# @FUNCTION: use_if_iuse
# @USAGE: <flag>
# @DESCRIPTION:
# Return true if the given flag is in USE and IUSE.
#
# Note that this function should not be used in the global scope.
use_if_iuse() {
eqawarn "use_if_iuse is deprecated."
eqawarn "Define it as a local function, or inline it:"
eqawarn " in_iuse foo && use foo"
in_iuse $1 || return 1
use $1
}
case ${EAPI:-0} in
0|1|2|3|4)
# @FUNCTION: usex
# @USAGE: <USE flag> [true output] [false output] [true suffix] [false suffix]
# @DESCRIPTION:
# Proxy to declare usex for package managers or EAPIs that do not provide it
# and use the package manager implementation when available (i.e. EAPI >= 5).
# If USE flag is set, echo [true output][true suffix] (defaults to "yes"),
# otherwise echo [false output][false suffix] (defaults to "no").
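# For example (the USE flag and options are illustrative):
# @CODE
# econf $(usex debug --enable-debug --disable-debug)
# @CODE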
usex() { use "$1" && echo "${2-yes}$4" || echo "${3-no}$5" ; } #382963
;;
esac
case ${EAPI:-0} in
0|1|2|3|4|5)
# @FUNCTION: einstalldocs
# @DESCRIPTION:
# Install documentation using DOCS and HTML_DOCS, in EAPIs that do not
# provide this function. When available (i.e., in EAPI 6 or later),
# the package manager implementation should be used instead.
#
# If DOCS is declared and non-empty, all files listed in it are
# installed. The files must exist, otherwise the function will fail.
# In EAPI 4 and 5, DOCS may specify directories as well; in earlier
# EAPIs using directories is unsupported.
#
# If DOCS is not declared, the files matching patterns given
# in the default EAPI implementation of src_install will be installed.
# If this is undesired, DOCS can be set to empty value to prevent any
# documentation from being installed.
#
# If HTML_DOCS is declared and non-empty, all files and/or directories
# listed in it are installed as HTML docs (using dohtml).
#
# Both DOCS and HTML_DOCS can either be an array or a whitespace-
# separated list. Whenever directories are allowed, '<directory>/.' may
# be specified in order to install all files within the directory
# without creating a sub-directory in docdir.
#
# Passing additional options to dodoc and dohtml is not supported.
# If you needed such a thing, you need to call those helpers explicitly.
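# A minimal sketch of setting the variables in an ebuild (paths illustrative):
# @CODE
# DOCS=( README ChangeLog "docs/." )
# HTML_DOCS=( "doc/html/." )
# @CODE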
einstalldocs() {
debug-print-function ${FUNCNAME} "${@}"
local dodoc_opts=-r
has ${EAPI} 0 1 2 3 && dodoc_opts=
if ! declare -p DOCS &>/dev/null ; then
local d
for d in README* ChangeLog AUTHORS NEWS TODO CHANGES \
THANKS BUGS FAQ CREDITS CHANGELOG ; do
if [[ -s ${d} ]] ; then
dodoc "${d}" || die
fi
done
elif [[ $(declare -p DOCS) == "declare -a"* ]] ; then
if [[ ${DOCS[@]} ]] ; then
dodoc ${dodoc_opts} "${DOCS[@]}" || die
fi
else
if [[ ${DOCS} ]] ; then
dodoc ${dodoc_opts} ${DOCS} || die
fi
fi
if [[ $(declare -p HTML_DOCS 2>/dev/null) == "declare -a"* ]] ; then
if [[ ${HTML_DOCS[@]} ]] ; then
dohtml -r "${HTML_DOCS[@]}" || die
fi
else
if [[ ${HTML_DOCS} ]] ; then
dohtml -r ${HTML_DOCS} || die
fi
fi
return 0
}
# @FUNCTION: in_iuse
# @USAGE: <flag>
# @DESCRIPTION:
# Determines whether the given flag is in IUSE. Strips IUSE default
# prefixes as necessary. In EAPIs where it is available (i.e., EAPI 6
# or later), the package manager implementation should be used instead.
#
# Note that this function must not be used in the global scope.
in_iuse() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 1 ]] || die "Invalid args to ${FUNCNAME}()"
local flag=${1}
local liuse=( ${IUSE} )
has "${flag}" "${liuse[@]#[+-]}"
}
;;
esac
case ${EAPI:-0} in
0|1|2|3|4|5|6)
# @FUNCTION: eqawarn
# @USAGE: [message]
# @DESCRIPTION:
# Proxy to ewarn for package managers that don't provide eqawarn and use the PM
# implementation if available. Reuses PORTAGE_ELOG_CLASSES as set by the dev
# profile.
if ! declare -F eqawarn >/dev/null ; then
eqawarn() {
has qa ${PORTAGE_ELOG_CLASSES} && ewarn "$@"
:
}
fi
;;
esac
fi

194
eclass/fcaps.eclass Normal file
View File

@ -0,0 +1,194 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: fcaps.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @BLURB: function to set POSIX file-based capabilities
# @DESCRIPTION:
# This eclass provides a function to set file-based capabilities on binaries.
# This is not the same as USE=caps which controls runtime capability changes,
# often via packages like libcap.
#
# Due to probable capability-loss on moving or copying, this happens in
# pkg_postinst phase (at least for now).
#
# @EXAMPLE:
# You can manually set the caps on ping and ping6 by doing:
# @CODE
# pkg_postinst() {
# fcaps cap_net_raw bin/ping bin/ping6
# }
# @CODE
#
# Or set it via the global ebuild var FILECAPS:
# @CODE
# FILECAPS=(
# cap_net_raw bin/ping bin/ping6
# )
# @CODE
if [[ -z ${_FCAPS_ECLASS} ]]; then
_FCAPS_ECLASS=1
IUSE="+filecaps"
# Since it is needed in pkg_postinst() it must be in RDEPEND
case "${EAPI:-0}" in
[0-6])
RDEPEND="filecaps? ( sys-libs/libcap )"
;;
*)
BDEPEND="filecaps? ( sys-libs/libcap )"
RDEPEND="${BDEPEND}"
;;
esac
# @ECLASS-VARIABLE: FILECAPS
# @DEFAULT_UNSET
# @DESCRIPTION:
# An array of fcap arguments to use to automatically execute fcaps. See that
# function for more details.
#
# All args are consumed until the '--' marker is found. So if you have:
# @CODE
# FILECAPS=( moo cow -- fat cat -- chubby penguin )
# @CODE
#
# This will end up executing:
# @CODE
# fcaps moo cow
# fcaps fat cat
# fcaps chubby penguin
# @CODE
#
# Note: If you override pkg_postinst, you must call fcaps_pkg_postinst yourself.
# @FUNCTION: fcaps
# @USAGE: [-o <owner>] [-g <group>] [-m <mode>] [-M <caps mode>] <capabilities> <file[s]>
# @DESCRIPTION:
# Sets the specified capabilities on the specified files.
#
# The caps option takes the form as expected by the cap_from_text(3) man page.
# If no action is specified, then "=ep" will be used as a default.
#
# If the file is a relative path (e.g. bin/foo rather than /bin/foo), then the
# appropriate path var ($D/$ROOT/etc...) will be prefixed based on the current
# ebuild phase.
#
# The caps mode (default 711) is used to set the permission on the file if
# capabilities were properly set on the file.
#
# If the system is unable to set capabilities, it will use the specified user,
# group, and mode (presumably to make the binary set*id). The defaults there
# are root:0 and 4711. Otherwise, the ownership and permissions will be
# unchanged.
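# A hedged example with explicit owner/group/mode (the group name and path
# are hypothetical):
# @CODE
# fcaps -o root -g netcap -m 0550 cap_net_raw,cap_net_admin usr/bin/capture
# @CODE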
fcaps() {
debug-print-function ${FUNCNAME} "$@"
if [[ ${EUID} != 0 ]] ; then
einfo "Insufficient privileges to execute ${FUNCNAME}, skipping."
return 0
fi
# Process the user options first.
local owner='root'
local group='0'
local mode='4711'
local caps_mode='711'
while [[ $# -gt 0 ]] ; do
case $1 in
-o) owner=$2; shift;;
-g) group=$2; shift;;
-m) mode=$2; shift;;
-M) caps_mode=$2; shift;;
*) break;;
esac
shift
done
[[ $# -lt 2 ]] && die "${FUNCNAME}: wrong arg count"
local caps=$1
[[ ${caps} == *[-=+]* ]] || caps+="=ep"
shift
local root
case ${EBUILD_PHASE} in
compile|install|preinst)
root=${ED:-${D}}
;;
postinst)
root=${EROOT:-${ROOT}}
;;
esac
root=${root%/}
# Process every file!
local file
for file ; do
[[ ${file} != /* ]] && file="${root}/${file}"
if use filecaps ; then
# Try to set capabilities. Ignore errors when the
# fs doesn't support it, but abort on all others.
debug-print "${FUNCNAME}: setting caps '${caps}' on '${file}'"
# If everything goes well, we don't want the file to be readable
# by people.
chmod ${caps_mode} "${file}" || die
if ! out=$(LC_ALL=C setcap "${caps}" "${file}" 2>&1) ; then
case ${out} in
# ENOTSUP and EOPNOTSUPP might be the same value which means
# strerror() on them is unstable -- we can get both. #559608
*"Not supported"*|\
*"Operation not supported"*)
local fstype=$(stat -f -c %T "${file}")
ewarn "Could not set caps on '${file}' due to missing filesystem support:"
ewarn "* enable XATTR support for '${fstype}' in your kernel (if configurable)"
ewarn "* mount the fs with the user_xattr option (if not the default)"
ewarn "* enable the relevant FS_SECURITY option (if configurable)"
;;
*)
eerror "Setting caps '${caps}' on file '${file}' failed:"
eerror "${out}"
die "could not set caps"
;;
esac
else
# Sanity check that everything took.
setcap -v "${caps}" "${file}" >/dev/null \
|| die "Checking caps '${caps}' on '${file}' failed"
# Everything worked. Move on to the next file.
continue
fi
fi
# If we're still here, setcaps failed.
debug-print "${FUNCNAME}: setting owner/mode on '${file}'"
chown "${owner}:${group}" "${file}" || die
chmod ${mode} "${file}" || die
done
}
# @FUNCTION: fcaps_pkg_postinst
# @DESCRIPTION:
# Process the FILECAPS array.
fcaps_pkg_postinst() {
local arg args=()
for arg in "${FILECAPS[@]}" "--" ; do
if [[ ${arg} == "--" ]] ; then
fcaps "${args[@]}"
args=()
else
args+=( "${arg}" )
fi
done
}
EXPORT_FUNCTIONS pkg_postinst
fi

59
eclass/findlib.eclass Normal file
View File

@ -0,0 +1,59 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: findlib.eclass
# @MAINTAINER:
# maintainer-needed@gentoo.org
# @AUTHOR:
# Original author: Matthieu Sozeau <mattam@gentoo.org> (retired)
# @BLURB: ocamlfind (a.k.a. findlib) eclass
# @DESCRIPTION:
# ocamlfind (a.k.a. findlib) eclass
# Do not complain about CFLAGS etc since ml projects do not use them.
QA_FLAGS_IGNORED='.*'
# From this findlib version there is proper stublibs support.
DEPEND=">=dev-ml/findlib-1.0.4-r1"
[[ ${FINDLIB_USE} ]] && DEPEND="${FINDLIB_USE}? ( ${DEPEND} )"
check_ocamlfind() {
if [ ! -x "${EPREFIX}"/usr/bin/ocamlfind ]
then
eerror "In findlib.eclass: could not find the ocamlfind executable"
eerror "Please report this bug on gentoo's bugzilla, assigning to ml@gentoo.org"
die "ocamlfind executabled not found"
fi
}
# @FUNCTION: findlib_src_preinst
# @DESCRIPTION:
# Prepare the image for a findlib installation.
# We use the stublibs style, so no ld.conf needs to be
# updated when a package installs C shared libraries.
findlib_src_preinst() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
has "${EAPI:-0}" 0 1 2 && use !prefix && ED="${D}"
check_ocamlfind
# destdir is the ocaml sitelib
local destdir=`ocamlfind printconf destdir`
# strip off prefix
destdir=${destdir#${EPREFIX}}
dodir ${destdir} || die "dodir failed"
export OCAMLFIND_DESTDIR=${ED}${destdir}
# stublibs style
dodir ${destdir}/stublibs || die "dodir failed"
export OCAMLFIND_LDCONF=ignore
}
# @FUNCTION: findlib_src_install
# @DESCRIPTION:
# Install with a properly setup findlib
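# A minimal sketch of an ebuild phase using it:
# @CODE
# src_install() {
# 	findlib_src_install
# }
# @CODE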
findlib_src_install() {
findlib_src_preinst
make DESTDIR="${D}" "$@" install || die "make failed"
}

41
eclass/fixheadtails.eclass Normal file
View File

@ -0,0 +1,41 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: fixheadtails.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @AUTHOR:
# Original author John Mylchreest <johnm@gentoo.org>
# @BLURB: functions to replace obsolete head/tail with POSIX compliant ones
_do_sed_fix() {
einfo " - fixed $1"
sed -i \
-e 's/head \+-\([0-9]\)/head -n \1/g' \
-e 's/tail \+\([-+][0-9]\+\)c/tail -c \1/g' \
-e 's/tail \+\([-+][0-9]\)/tail -n \1/g' ${1} || \
die "sed ${1} failed"
}
# @FUNCTION: ht_fix_file
# @USAGE: <files>
# @DESCRIPTION:
# Fix all the specified files.
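# For example (the file names are illustrative):
# @CODE
# ht_fix_file configure Makefile.in
# @CODE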
ht_fix_file() {
local i
einfo "Replacing obsolete head/tail with POSIX compliant ones"
for i in "$@" ; do
_do_sed_fix "$i"
done
}
# @FUNCTION: ht_fix_all
# @DESCRIPTION:
# Find and fix all files in the current directory as needed.
ht_fix_all() {
local MATCHES
MATCHES=$(grep -l -s -i -R -e "head -[ 0-9]" -e "tail [+-][ 0-9]" * | sort -u)
[[ -n ${MATCHES} ]] \
&& ht_fix_file ${MATCHES} \
|| einfo "No need for ht_fix_all anymore !"
}

776
eclass/flag-o-matic.eclass Normal file
View File

@ -0,0 +1,776 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: flag-o-matic.eclass
# @MAINTAINER:
# toolchain@gentoo.org
# @BLURB: common functions to manipulate and query toolchain flags
# @DESCRIPTION:
# This eclass contains a suite of functions to help developers sanely
# and safely manage toolchain flags in their builds.
if [[ -z ${_FLAG_O_MATIC_ECLASS} ]]; then
_FLAG_O_MATIC_ECLASS=1
inherit eutils toolchain-funcs multilib
# Return all the flag variables that our high level funcs operate on.
all-flag-vars() {
echo {ADA,C,CPP,CXX,CCAS,F,FC,LD}FLAGS
}
# {C,CPP,CXX,CCAS,F,FC,LD}FLAGS that we allow in strip-flags
# Note: shell globs and character lists are allowed
setup-allowed-flags() {
ALLOWED_FLAGS=(
-pipe -O '-O[12sg]' -mcpu -march -mtune
'-fstack-protector*' '-fsanitize*' '-fstack-check*' -fno-stack-check
-fbounds-check -fbounds-checking -fno-strict-overflow
-fno-PIE -fno-pie -nopie -no-pie -fno-unit-at-a-time
# debugging symbols should generally be very safe to add
-g '-g[0-9]'
-ggdb '-ggdb[0-9]'
-gdwarf '-gdwarf-*'
-gstabs -gstabs+
-gz
-fno-ident -fpermissive -frecord-gcc-switches
'-fdiagnostics*' '-fplugin*'
'-W*' -w
# CPPFLAGS and LDFLAGS
'-[DUILR]*' '-Wl,*'
# Linker choice flag
'-fuse-ld'
)
# allow a bunch of flags that negate features / control ABI
ALLOWED_FLAGS+=(
'-fno-stack-protector*' '-fabi-version=*'
-fno-strict-aliasing -fno-bounds-check -fno-bounds-checking -fstrict-overflow
-fno-omit-frame-pointer '-fno-builtin*'
)
ALLOWED_FLAGS+=(
-mregparm -mno-app-regs -mapp-regs -mno-mmx -mno-sse
-mno-sse2 -mno-sse3 -mno-ssse3 -mno-sse4 -mno-sse4.1 -mno-sse4.2
-mno-avx -mno-aes -mno-pclmul -mno-sse4a -mno-3dnow -mno-popcnt
-mno-abm -mips1 -mips2 -mips3 -mips4 -mips32 -mips64 -mips16 -mplt
-msoft-float -mno-soft-float -mhard-float -mno-hard-float -mfpu
-mieee -mieee-with-inexact -mschedule -mfloat-gprs -mspe -mno-spe
-mtls-direct-seg-refs -mno-tls-direct-seg-refs -mflat -mno-flat
-mno-faster-structs -mfaster-structs -m32 -m64 -mx32 -mabi
-mlittle-endian -mbig-endian -EL -EB -fPIC -mlive-g0 -mcmodel
-mstack-bias -mno-stack-bias -msecure-plt '-m*-toc' -mfloat-abi
-mfix-r4000 -mno-fix-r4000 -mfix-r4400 -mno-fix-r4400
-mfix-rm7000 -mno-fix-rm7000 -mfix-r10000 -mno-fix-r10000
-mr10k-cache-barrier -mthumb -marm
# gcc 4.5
-mno-fma4 -mno-movbe -mno-xop -mno-lwp
# gcc 4.6
-mno-fsgsbase -mno-rdrnd -mno-f16c -mno-bmi -mno-tbm
# gcc 4.7
-mno-avx2 -mno-bmi2 -mno-fma -mno-lzcnt
# gcc 4.8
-mno-fxsr -mno-hle -mno-rtm -mno-xsave -mno-xsaveopt
# gcc 4.9
-mno-avx512cd -mno-avx512er -mno-avx512f -mno-avx512pf -mno-sha
)
# Allow some safe individual flags. Should come along with the bug reference.
ALLOWED_FLAGS+=(
# Allow explicit stack realignment to run non-conformant
# binaries: bug #677852
-mstackrealign
)
}
# inverted filters for hardened compiler. This is trying to unpick
# the hardened compiler defaults.
_filter-hardened() {
local f
for f in "$@" ; do
case "${f}" in
# Ideally we should only concern ourselves with PIE flags,
# not -fPIC or -fpic, but too many places filter -fPIC without
# thinking about -fPIE.
-fPIC|-fpic|-fPIE|-fpie|-Wl,pie|-pie)
gcc-specs-pie || continue
if ! is-flagq -nopie && ! is-flagq -no-pie ; then
# Support older Gentoo form first (-nopie) before falling
# back to the official gcc-6+ form (-no-pie).
if test-flags -nopie >/dev/null ; then
append-flags -nopie
else
append-flags -no-pie
fi
fi
;;
-fstack-protector)
gcc-specs-ssp || continue
is-flagq -fno-stack-protector || append-flags $(test-flags -fno-stack-protector);;
-fstack-protector-all)
gcc-specs-ssp-to-all || continue
is-flagq -fno-stack-protector-all || append-flags $(test-flags -fno-stack-protector-all);;
-fno-strict-overflow)
gcc-specs-nostrict || continue
is-flagq -fstrict-overflow || append-flags $(test-flags -fstrict-overflow);;
esac
done
}
# Remove occurrences of strings from variable given in $1
# Strings removed are matched as globs, so for example
# '-O*' would remove -O1, -O2 etc.
_filter-var() {
local f x var=$1 new=()
shift
for f in ${!var} ; do
for x in "$@" ; do
# Note this should work with globs like -O*
[[ ${f} == ${x} ]] && continue 2
done
new+=( "${f}" )
done
export ${var}="${new[*]}"
}
# @FUNCTION: filter-flags
# @USAGE: <flags>
# @DESCRIPTION:
# Remove particular <flags> from {C,CPP,CXX,CCAS,F,FC,LD}FLAGS. Accepts shell globs.
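# For example (the flags are illustrative):
# @CODE
# filter-flags -fomit-frame-pointer '-mfpmath=*'
# @CODE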
filter-flags() {
_filter-hardened "$@"
local v
for v in $(all-flag-vars) ; do
_filter-var ${v} "$@"
done
return 0
}
# @FUNCTION: filter-lfs-flags
# @DESCRIPTION:
# Remove flags that enable Large File Support.
filter-lfs-flags() {
[[ $# -ne 0 ]] && die "filter-lfs-flags takes no arguments"
# http://www.gnu.org/s/libc/manual/html_node/Feature-Test-Macros.html
# _LARGEFILE_SOURCE: enable support for new LFS funcs (ftello/etc...)
# _LARGEFILE64_SOURCE: enable support for 64bit variants (off64_t/fseeko64/etc...)
# _FILE_OFFSET_BITS: default to 64bit variants (off_t is defined as off64_t)
filter-flags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE
}
# @FUNCTION: filter-ldflags
# @USAGE: <flags>
# @DESCRIPTION:
# Remove particular <flags> from LDFLAGS. Accepts shell globs.
filter-ldflags() {
_filter-var LDFLAGS "$@"
return 0
}
# @FUNCTION: append-cppflags
# @USAGE: <flags>
# @DESCRIPTION:
# Add extra <flags> to the current CPPFLAGS.
append-cppflags() {
[[ $# -eq 0 ]] && return 0
export CPPFLAGS+=" $*"
return 0
}
# @FUNCTION: append-cflags
# @USAGE: <flags>
# @DESCRIPTION:
# Add extra <flags> to the current CFLAGS. If a flag might not be supported
# with different compilers (or versions), then use test-flags-CC like so:
# @CODE
# append-cflags $(test-flags-CC -funky-flag)
# @CODE
append-cflags() {
[[ $# -eq 0 ]] && return 0
# Do not do automatic flag testing ourselves. #417047
export CFLAGS+=" $*"
return 0
}
# @FUNCTION: append-cxxflags
# @USAGE: <flags>
# @DESCRIPTION:
# Add extra <flags> to the current CXXFLAGS. If a flag might not be supported
# with different compilers (or versions), then use test-flags-CXX like so:
# @CODE
# append-cxxflags $(test-flags-CXX -funky-flag)
# @CODE
append-cxxflags() {
[[ $# -eq 0 ]] && return 0
# Do not do automatic flag testing ourselves. #417047
export CXXFLAGS+=" $*"
return 0
}
# @FUNCTION: append-fflags
# @USAGE: <flags>
# @DESCRIPTION:
# Add extra <flags> to the current {F,FC}FLAGS. If a flag might not be supported
# with different compilers (or versions), then use test-flags-F77 like so:
# @CODE
# append-fflags $(test-flags-F77 -funky-flag)
# @CODE
append-fflags() {
[[ $# -eq 0 ]] && return 0
# Do not do automatic flag testing ourselves. #417047
export FFLAGS+=" $*"
export FCFLAGS+=" $*"
return 0
}
# @FUNCTION: append-lfs-flags
# @DESCRIPTION:
# Add flags that enable Large File Support.
append-lfs-flags() {
[[ $# -ne 0 ]] && die "append-lfs-flags takes no arguments"
# see comments in filter-lfs-flags func for meaning of these
append-cppflags -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE -D_LARGEFILE64_SOURCE
}
# @FUNCTION: append-ldflags
# @USAGE: <flags>
# @DESCRIPTION:
# Add extra <flags> to the current LDFLAGS.
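# For example:
# @CODE
# append-ldflags -Wl,--as-needed
# @CODE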
append-ldflags() {
[[ $# -eq 0 ]] && return 0
local flag
for flag in "$@"; do
[[ ${flag} == -l* ]] && \
eqawarn "Appending a library link instruction (${flag}); libraries to link to should not be passed through LDFLAGS"
done
export LDFLAGS="${LDFLAGS} $*"
return 0
}
# @FUNCTION: append-flags
# @USAGE: <flags>
# @DESCRIPTION:
# Add extra <flags> to your current {C,CXX,F,FC}FLAGS.
append-flags() {
[[ $# -eq 0 ]] && return 0
case " $* " in
*' '-[DIU]*) eqawarn 'please use append-cppflags for preprocessor flags' ;;
*' '-L*|\
*' '-Wl,*) eqawarn 'please use append-ldflags for linker flags' ;;
esac
append-cflags "$@"
append-cxxflags "$@"
append-fflags "$@"
return 0
}
# @FUNCTION: replace-flags
# @USAGE: <old> <new>
# @DESCRIPTION:
# Replace the <old> flag with <new>. Accepts shell globs for <old>.
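# For example, downgrading any optimisation level to -O2:
# @CODE
# replace-flags '-O*' -O2
# @CODE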
replace-flags() {
[[ $# != 2 ]] && die "Usage: replace-flags <old flag> <new flag>"
local f var new
for var in $(all-flag-vars) ; do
# Looping over the flags instead of using a global
# substitution ensures that we're working with flag atoms.
# Otherwise globs like -O* have the potential to wipe out the
# list of flags.
new=()
for f in ${!var} ; do
# Note this should work with globs like -O*
[[ ${f} == ${1} ]] && f=${2}
new+=( "${f}" )
done
export ${var}="${new[*]}"
done
return 0
}
# @FUNCTION: replace-cpu-flags
# @USAGE: <old> <new>
# @DESCRIPTION:
# Replace cpu flags (like -march/-mcpu/-mtune) that select the <old> cpu
# with flags that select the <new> cpu. Accepts shell globs for <old>.
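# For example, replacing an i486 or i586 selection with i686 (values are
# illustrative):
# @CODE
# replace-cpu-flags i486 i586 i686
# @CODE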
replace-cpu-flags() {
local newcpu="$#" ; newcpu="${!newcpu}"
while [ $# -gt 1 ] ; do
# quote to make sure that no globbing is done (particularly on
# ${oldcpu}) prior to calling replace-flags
replace-flags "-march=${1}" "-march=${newcpu}"
replace-flags "-mcpu=${1}" "-mcpu=${newcpu}"
replace-flags "-mtune=${1}" "-mtune=${newcpu}"
shift
done
return 0
}
_is_flagq() {
local x var="$1[*]"
for x in ${!var} ; do
[[ ${x} == $2 ]] && return 0
done
return 1
}
# @FUNCTION: is-flagq
# @USAGE: <flag>
# @DESCRIPTION:
# Returns shell true if <flag> is in {C,CXX,F,FC}FLAGS, else returns shell false. Accepts shell globs.
is-flagq() {
[[ -n $2 ]] && die "Usage: is-flag <flag>"
local var
for var in $(all-flag-vars) ; do
_is_flagq ${var} "$1" && return 0
done
return 1
}
# @FUNCTION: is-flag
# @USAGE: <flag>
# @DESCRIPTION:
# Echo's "true" if flag is set in {C,CXX,F,FC}FLAGS. Accepts shell globs.
is-flag() {
is-flagq "$@" && echo true
}
# @FUNCTION: is-ldflagq
# @USAGE: <flag>
# @DESCRIPTION:
# Returns shell true if <flag> is in LDFLAGS, else returns shell false. Accepts shell globs.
is-ldflagq() {
[[ -n $2 ]] && die "Usage: is-ldflag <flag>"
_is_flagq LDFLAGS $1
}
# @FUNCTION: is-ldflag
# @USAGE: <flag>
# @DESCRIPTION:
# Echo's "true" if flag is set in LDFLAGS. Accepts shell globs.
is-ldflag() {
is-ldflagq "$@" && echo true
}
# @FUNCTION: filter-mfpmath
# @USAGE: <math types>
# @DESCRIPTION:
# Remove specified math types from the fpmath flag. For example, if the user
# has -mfpmath=sse,386, running `filter-mfpmath sse` will leave the user with
# -mfpmath=386.
filter-mfpmath() {
local orig_mfpmath new_math prune_math
# save the original -mfpmath flag
orig_mfpmath=$(get-flag -mfpmath)
# get the value of the current -mfpmath flag
new_math=$(get-flag mfpmath)
# convert "both" to something we can filter
new_math=${new_math/both/387,sse}
new_math=" ${new_math//[,+]/ } "
# figure out which math values are to be removed
prune_math=""
for prune_math in "$@" ; do
new_math=${new_math/ ${prune_math} / }
done
new_math=$(echo ${new_math})
new_math=${new_math// /,}
if [[ -z ${new_math} ]] ; then
# if all of the user-specified math values are slated for
# removal, then we just filter the flag entirely
filter-flags ${orig_mfpmath}
else
# if we only want to filter some of the user specified
# math values, then we replace the current flag
replace-flags ${orig_mfpmath} -mfpmath=${new_math}
fi
return 0
}
# @FUNCTION: strip-flags
# @DESCRIPTION:
# Strip *FLAGS of everything except known good/safe flags. This runs over all
# flags returned by all-flag-vars().
strip-flags() {
[[ $# -ne 0 ]] && die "strip-flags takes no arguments"
local x y var
local ALLOWED_FLAGS
setup-allowed-flags
set -f # disable pathname expansion
for var in $(all-flag-vars) ; do
local new=()
for x in ${!var} ; do
local flag=${x%%=*}
for y in "${ALLOWED_FLAGS[@]}" ; do
if [[ -z ${flag%%${y}} ]] ; then
new+=( "${x}" )
break
fi
done
done
# In case we filtered out all optimization flags fallback to -O2
if _is_flagq ${var} "-O*" && ! _is_flagq new "-O*" ; then
new+=( -O2 )
fi
if [[ ${!var} != "${new[*]}" ]] ; then
einfo "strip-flags: ${var}: changed '${!var}' to '${new[*]}'"
fi
export ${var}="${new[*]}"
done
set +f # re-enable pathname expansion
return 0
}
test-flag-PROG() {
local comp=$1
local lang=$2
shift 2
if [[ -z ${comp} ]]; then
return 1
fi
if [[ -z $1 ]]; then
return 1
fi
# verify selected compiler exists before using it
comp=($(tc-get${comp}))
# 'comp' can already contain compiler options.
# 'type' needs a binary name
if ! type -p ${comp[0]} >/dev/null; then
return 1
fi
# Set up test file.
local in_src in_ext cmdline_extra=()
case "${lang}" in
# compiler/assembler only
c)
in_ext='c'
in_src='int main(void) { return 0; }'
cmdline_extra+=(-xc -c)
;;
c++)
in_ext='cc'
in_src='int main(void) { return 0; }'
cmdline_extra+=(-xc++ -c)
;;
f77)
in_ext='f'
# fixed source form
in_src=' end'
cmdline_extra+=(-xf77 -c)
;;
f95)
in_ext='f90'
in_src='end'
cmdline_extra+=(-xf95 -c)
;;
# C compiler/assembler/linker
c+ld)
in_ext='c'
in_src='int main(void) { return 0; }'
cmdline_extra+=(-xc)
;;
esac
local test_in=${T}/test-flag.${in_ext}
local test_out=${T}/test-flag.exe
printf "%s\n" "${in_src}" > "${test_in}" || die "Failed to create '${test_in}'"
# Currently we rely on the compiler producing warning-free output
# without the flag in order to tell whether a flag produces any warnings.
# This has a few drawbacks:
# - if compiler already generates warnings we filter out
# every single flag: bug #712488
# - if user actually wants to see warnings we just strip
# them regardless of warnings type.
#
# We can add more selective detection of no-op flags via
# '-Werror=ignored-optimization-argument' and similar error options,
# as we already do with '-Qunused-arguments'.
local cmdline=(
"${comp[@]}"
# Clang will warn about unknown gcc flags but exit 0.
# Need -Werror to force it to exit non-zero.
-Werror
"$@"
# -x<lang> options need to go before first source file
"${cmdline_extra[@]}"
"${test_in}" -o "${test_out}"
)
if ! "${cmdline[@]}" &>/dev/null; then
# -Werror makes clang bail out on unused arguments as well;
# try to add -Qunused-arguments to work around that.
# Other compilers don't support it, but then it's a failure like
# any other
cmdline+=( -Qunused-arguments )
"${cmdline[@]}" &>/dev/null
fi
}
# @FUNCTION: test-flag-CC
# @USAGE: <flag>
# @DESCRIPTION:
# Returns shell true if <flag> is supported by the C compiler, else returns shell false.
test-flag-CC() { test-flag-PROG "CC" c "$@"; }
# @FUNCTION: test-flag-CXX
# @USAGE: <flag>
# @DESCRIPTION:
# Returns shell true if <flag> is supported by the C++ compiler, else returns shell false.
test-flag-CXX() { test-flag-PROG "CXX" c++ "$@"; }
# @FUNCTION: test-flag-F77
# @USAGE: <flag>
# @DESCRIPTION:
# Returns shell true if <flag> is supported by the Fortran 77 compiler, else returns shell false.
test-flag-F77() { test-flag-PROG "F77" f77 "$@"; }
# @FUNCTION: test-flag-FC
# @USAGE: <flag>
# @DESCRIPTION:
# Returns shell true if <flag> is supported by the Fortran 90 compiler, else returns shell false.
test-flag-FC() { test-flag-PROG "FC" f95 "$@"; }
# @FUNCTION: test-flag-CCLD
# @USAGE: <flag>
# @DESCRIPTION:
# Returns shell true if <flag> is supported by the C compiler and linker, else returns shell false.
test-flag-CCLD() { test-flag-PROG "CC" c+ld "$@"; }
test-flags-PROG() {
local comp=$1
local flags=()
local x
shift
[[ -z ${comp} ]] && return 1
while (( $# )); do
case "$1" in
# '-B /foo': bug #687198
--param|-B)
if test-flag-${comp} "$1" "$2"; then
flags+=( "$1" "$2" )
fi
shift 2
;;
*)
if test-flag-${comp} "$1"; then
flags+=( "$1" )
fi
shift 1
;;
esac
done
echo "${flags[*]}"
# Just bail if we don't have any flags
[[ ${#flags[@]} -gt 0 ]]
}
# @FUNCTION: test-flags-CC
# @USAGE: <flags>
# @DESCRIPTION:
# Returns shell true if <flags> are supported by the C compiler, else returns shell false.
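# Example (the flag tested is illustrative):
# @CODE
# append-cflags $(test-flags-CC -fno-strict-aliasing)
# @CODE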
test-flags-CC() { test-flags-PROG "CC" "$@"; }
# @FUNCTION: test-flags-CXX
# @USAGE: <flags>
# @DESCRIPTION:
# Returns shell true if <flags> are supported by the C++ compiler, else returns shell false.
test-flags-CXX() { test-flags-PROG "CXX" "$@"; }
# @FUNCTION: test-flags-F77
# @USAGE: <flags>
# @DESCRIPTION:
# Returns shell true if <flags> are supported by the Fortran 77 compiler, else returns shell false.
test-flags-F77() { test-flags-PROG "F77" "$@"; }
# @FUNCTION: test-flags-FC
# @USAGE: <flags>
# @DESCRIPTION:
# Returns shell true if <flags> are supported by the Fortran 90 compiler, else returns shell false.
test-flags-FC() { test-flags-PROG "FC" "$@"; }
# @FUNCTION: test-flags-CCLD
# @USAGE: <flags>
# @DESCRIPTION:
# Returns shell true if <flags> are supported by the C compiler and default linker, else returns shell false.
test-flags-CCLD() { test-flags-PROG "CCLD" "$@"; }
# @FUNCTION: test-flags
# @USAGE: <flags>
# @DESCRIPTION:
# Short-hand that should hopefully work for both the C and C++ compilers, but
# it's really only present due to the append-flags() abomination.
test-flags() { test-flags-CC "$@"; }
# @FUNCTION: test_version_info
# @USAGE: <version>
# @DESCRIPTION:
# Returns shell true if the current C compiler version matches <version>, else returns shell false.
# Accepts shell globs.
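# Example (the version string is illustrative):
# @CODE
# test_version_info 9.3 && einfo "building with gcc 9.3"
# @CODE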
test_version_info() {
if [[ $($(tc-getCC) --version 2>&1) == *$1* ]]; then
return 0
else
return 1
fi
}
# @FUNCTION: strip-unsupported-flags
# @DESCRIPTION:
# Strip {C,CXX,F,FC}FLAGS of any flags not supported by the active toolchain.
strip-unsupported-flags() {
[[ $# -ne 0 ]] && die "strip-unsupported-flags takes no arguments"
export CFLAGS=$(test-flags-CC ${CFLAGS})
export CXXFLAGS=$(test-flags-CXX ${CXXFLAGS})
export FFLAGS=$(test-flags-F77 ${FFLAGS})
export FCFLAGS=$(test-flags-FC ${FCFLAGS})
export LDFLAGS=$(test-flags-CCLD ${LDFLAGS})
}
# @FUNCTION: get-flag
# @USAGE: <flag>
# @DESCRIPTION:
# Find and echo the value for a particular flag. Accepts shell globs.
get-flag() {
[[ $# -ne 1 ]] && die "usage: <flag>"
local f var findflag="$1"
# this code looks a little flaky but seems to work for
# everything we want ...
# for example, if CFLAGS="-march=i686":
# `get-flag -march` == "-march=i686"
# `get-flag march` == "i686"
for var in $(all-flag-vars) ; do
for f in ${!var} ; do
if [ "${f/${findflag}}" != "${f}" ] ; then
printf "%s\n" "${f/-${findflag}=}"
return 0
fi
done
done
return 1
}
# @FUNCTION: replace-sparc64-flags
# @DESCRIPTION:
# Sets mcpu to v8 and uses the original value as mtune if none specified.
replace-sparc64-flags() {
[[ $# -ne 0 ]] && die "replace-sparc64-flags takes no arguments"
local SPARC64_CPUS="ultrasparc3 ultrasparc v9"
if [ "${CFLAGS/mtune}" != "${CFLAGS}" ]; then
for x in ${SPARC64_CPUS}; do
CFLAGS="${CFLAGS/-mcpu=${x}/-mcpu=v8}"
done
else
for x in ${SPARC64_CPUS}; do
CFLAGS="${CFLAGS/-mcpu=${x}/-mcpu=v8 -mtune=${x}}"
done
fi
if [ "${CXXFLAGS/mtune}" != "${CXXFLAGS}" ]; then
for x in ${SPARC64_CPUS}; do
CXXFLAGS="${CXXFLAGS/-mcpu=${x}/-mcpu=v8}"
done
else
for x in ${SPARC64_CPUS}; do
CXXFLAGS="${CXXFLAGS/-mcpu=${x}/-mcpu=v8 -mtune=${x}}"
done
fi
export CFLAGS CXXFLAGS
}
# @FUNCTION: append-libs
# @USAGE: <libs>
# @DESCRIPTION:
# Add extra <libs> to the current LIBS. All arguments should be prefixed with
# either -l or -L. For compatibility, if arguments are not prefixed as
# options, they are given a -l prefix automatically.
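# Example:
# @CODE
# append-libs -lm
# @CODE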
append-libs() {
[[ $# -eq 0 ]] && return 0
local flag
for flag in "$@"; do
if [[ -z "${flag// }" ]]; then
eqawarn "Appending an empty argument to LIBS is invalid! Skipping."
continue
fi
case $flag in
-[lL]*)
export LIBS="${LIBS} ${flag}"
;;
-*)
eqawarn "Appending non-library to LIBS (${flag}); Other linker flags should be passed via LDFLAGS"
export LIBS="${LIBS} ${flag}"
;;
*)
export LIBS="${LIBS} -l${flag}"
esac
done
return 0
}
# @FUNCTION: raw-ldflags
# @USAGE: [flags]
# @DESCRIPTION:
# Turn C style ldflags (-Wl,-foo) into straight ldflags - the results
# are suitable for passing directly to 'ld'; note LDFLAGS is usually passed
# to gcc where it needs the '-Wl,'.
#
# If no flags are specified, then default to ${LDFLAGS}.
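# Example (the LDFLAGS value is illustrative):
# @CODE
# # with LDFLAGS="-Wl,-O1 -Wl,--as-needed" this prints "-O1 --as-needed"
# raw-ldflags
# @CODE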
raw-ldflags() {
local x input="$@"
[[ -z ${input} ]] && input=${LDFLAGS}
set --
for x in ${input} ; do
case ${x} in
-Wl,*)
x=${x#-Wl,}
set -- "$@" ${x//,/ }
;;
*) # Assume it's a compiler driver flag, so throw it away #441808
;;
esac
done
echo "$@"
}
# @FUNCTION: no-as-needed
# @RETURN: Flag to disable as-needed behavior for use with append-ldflags.
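# Example:
# @CODE
# append-ldflags $(no-as-needed)
# @CODE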
no-as-needed() {
[[ $# -ne 0 ]] && die "no-as-needed takes no arguments"
case $($(tc-getLD) -v 2>&1 </dev/null) in
*GNU*) # GNU ld
echo "-Wl,--no-as-needed" ;;
esac
}
fi

View File

@ -0,0 +1,45 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# Author: Robin H. Johnson <robbat2@gentoo.org>
# font-ebdftopcf.eclass
# Eclass to make PCF font generator from BDF uniform and optimal
# The manpage for this eclass is in media-gfx/ebdftopcf.
# inherit this eclass after font.eclass
# if USE="-X", this eclass is basically a no-op, since bdftopcf requires Xorg.
IUSE="X"
# Variable declarations
DEPEND="X? ( media-gfx/ebdftopcf )"
RDEPEND=""
#
# Public functions
#
ebdftopcf() {
local bdffiles
bdffiles="$@"
[ -z "$bdffiles" ] && die "No BDF files specified."
emake -f "${EPREFIX}"/usr/share/ebdftopcf/Makefile.ebdftopcf \
BDFFILES="${bdffiles}" \
BDFTOPCF_PARAMS="${BDFTOPCF_PARAMS}" \
|| die "Failed to build PCF files"
}
#
# Public inheritable functions
#
font-ebdftopcf_src_compile() {
use X && FONT_SUFFIX="pcf.gz"
use X || FONT_SUFFIX="bdf"
if use X; then
[ -z "${BDFFILES}" ] && BDFFILES="$(find . -name '*.bdf')"
ebdftopcf ${BDFFILES}
fi
}
EXPORT_FUNCTIONS src_compile

258
eclass/font.eclass Normal file
View File

@ -0,0 +1,258 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: font.eclass
# @MAINTAINER:
# fonts@gentoo.org
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Eclass to make font installation uniform
case ${EAPI:-0} in
[56]) inherit eutils ;;
7) ;;
*) die "EAPI ${EAPI} is not supported by font.eclass." ;;
esac
if [[ ! ${_FONT_ECLASS} ]]; then
EXPORT_FUNCTIONS pkg_setup src_install pkg_postinst pkg_postrm
# @ECLASS-VARIABLE: FONT_SUFFIX
# @DEFAULT_UNSET
# @REQUIRED
# @DESCRIPTION:
# Space delimited list of font suffixes to install.
FONT_SUFFIX=${FONT_SUFFIX:-}
# @ECLASS-VARIABLE: FONT_S
# @DEFAULT_UNSET
# @DESCRIPTION:
# Directory containing the fonts. If unset, ${S} is used instead.
# Can also be an array of several directories.
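# Example (the subdirectories are hypothetical):
# @CODE
# FONT_S=( "${S}/ttf" "${S}/otf" )
# @CODE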
# @ECLASS-VARIABLE: FONT_PN
# @DESCRIPTION:
# Font name (ie. last part of FONTDIR).
FONT_PN=${FONT_PN:-${PN}}
# @ECLASS-VARIABLE: FONTDIR
# @DESCRIPTION:
# Full path to installation directory.
FONTDIR=${FONTDIR:-/usr/share/fonts/${FONT_PN}}
# @ECLASS-VARIABLE: FONT_CONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# Array containing fontconfig conf files to install.
FONT_CONF=( "" )
# @ECLASS-VARIABLE: DOCS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Space delimited list of docs to install.
# We always install these:
# COPYRIGHT README{,.txt} NEWS AUTHORS BUGS ChangeLog FONTLOG.txt
DOCS=${DOCS:-}
if [[ ${CATEGORY}/${PN} != media-fonts/encodings ]]; then
IUSE="X"
DEPEND="X? (
>=x11-apps/mkfontscale-1.2.0
media-fonts/encodings
)"
RDEPEND=""
fi
# @FUNCTION: font_xfont_config
# @DESCRIPTION:
# Generate Xorg font files (mkfontscale/mkfontdir).
font_xfont_config() {
local dir_name
if in_iuse X && use X ; then
dir_name="${1:-${FONT_PN}}"
rm -f "${ED%/}/${FONTDIR}/${1//${S}/}"/{fonts.{dir,scale},encodings.dir} \
|| die "failed to prepare ${FONTDIR}/${1//${S}/}"
einfo "Creating fonts.scale & fonts.dir in ${dir_name##*/}"
mkfontscale "${ED%/}/${FONTDIR}/${1//${S}/}" || eerror "failed to create fonts.scale"
mkfontdir \
-e ${EPREFIX}/usr/share/fonts/encodings \
-e ${EPREFIX}/usr/share/fonts/encodings/large \
"${ED%/}/${FONTDIR}/${1//${S}/}" || eerror "failed to create fonts.dir"
[[ -e fonts.alias ]] && doins fonts.alias
fi
}
# @FUNCTION: font_fontconfig
# @DESCRIPTION:
# Install fontconfig conf files given in FONT_CONF.
font_fontconfig() {
local conffile
if [[ -n ${FONT_CONF[@]} ]]; then
insinto /etc/fonts/conf.avail/
for conffile in "${FONT_CONF[@]}"; do
[[ -e ${conffile} ]] && doins "${conffile}"
done
fi
}
# @FUNCTION: font_cleanup_dirs
# @DESCRIPTION:
# Remove font directories containing only generated files.
font_cleanup_dirs() {
local genfiles="encodings.dir fonts.alias fonts.cache-1 fonts.dir fonts.scale"
# fonts.alias isn't generated but it's a special case (see below).
local d f g generated candidate otherfile
ebegin "Cleaning up font directories"
while read -d $'\0' -r; do
candidate=false
otherfile=false
for f in "${d}"/*; do
generated=false
# make sure this is a file and not a subdir
[[ -e ${f} || -L ${f} ]] || continue
if has ${f##*/} ${genfiles}; then
# this is a generated file
generated=true
break
fi
# if the file is a generated file then we know this is a font dir (as
# opposed to something like encodings or util) and a candidate for
# removal. if it's not generated then it's an "otherfile".
${generated} && candidate=true || otherfile=true
# if the directory is both a candidate for removal and contains at
# least one "otherfile" then don't remove it.
[[ ${candidate} == ${otherfile} ]] && break
done
# if in the end we only have generated files, purge the directory.
if [[ ${candidate} == true && ${otherfile} == false ]]; then
# we don't want to remove fonts.alias files that were installed by
# media-fonts/font-alias. any other fonts.alias files will have
# already been unmerged with their packages.
for g in ${genfiles}; do
if [[ ${g} != fonts.alias && ( -e ${d}/${g} || -L ${d}/${g} ) ]] ; then
rm "${d}"/${g} || eerror "failed to remove ${d}/${g}"
fi
done
# if there's nothing left remove the directory
find "${d}" -maxdepth 0 -type d -empty -delete || eerror "failed to purge ${d}"
fi
done < <(find -L "${EROOT%/}"/usr/share/fonts/ -type d -print0)
eend 0
}
# @FUNCTION: font_pkg_setup
# @DESCRIPTION:
# The font pkg_setup function.
# Collision protection
font_pkg_setup() {
# make sure we get no collisions
# setup is not the nicest place, but preinst doesn't cut it
if [[ -e "${EROOT%/}/${FONTDIR}/fonts.cache-1" ]] ; then
rm "${EROOT%/}/${FONTDIR}/fonts.cache-1" || die "failed to remove fonts.cache-1"
fi
}
# @FUNCTION: font_src_install
# @DESCRIPTION:
# The font src_install function.
font_src_install() {
local dir suffix commondoc
if [[ $(declare -p FONT_S 2>/dev/null) == "declare -a"* ]]; then
# recreate the directory structure if FONT_S is an array
for dir in "${FONT_S[@]}"; do
pushd "${dir}" > /dev/null || die "pushd ${dir} failed"
insinto "${FONTDIR}/${dir#"${S}"}"
for suffix in ${FONT_SUFFIX}; do
doins *.${suffix}
done
font_xfont_config "${dir}"
popd > /dev/null || die
done
elif [[ ${FONT_S/[[:space:]]} != "${FONT_S}" ]]; then
# backwards compatibility code, can be removed after 2021-02-14
eqawarn "Using a space-separated list for FONT_S is deprecated."
eqawarn "Use a bash array instead if there are multiple directories."
for dir in ${FONT_S}; do
pushd "${dir}" > /dev/null || die "pushd ${dir} failed"
insinto "${FONTDIR}/${dir//${S}/}"
for suffix in ${FONT_SUFFIX}; do
doins *.${suffix}
done
font_xfont_config "${dir}"
popd > /dev/null || die
done
else
pushd "${FONT_S:-${S}}" > /dev/null \
|| die "pushd ${FONT_S:-${S}} failed"
insinto "${FONTDIR}"
for suffix in ${FONT_SUFFIX}; do
doins *.${suffix}
done
font_xfont_config
popd > /dev/null || die
fi
font_fontconfig
einstalldocs
# install common docs
for commondoc in COPYRIGHT FONTLOG.txt; do
[[ -s ${commondoc} ]] && dodoc ${commondoc}
done
}
# @FUNCTION: _update_fontcache
# @DESCRIPTION:
# Updates fontcache if !prefix and media-libs/fontconfig installed
_update_fontcache() {
# unreadable font files = fontconfig segfaults
find "${EROOT%/}"/usr/share/fonts/ -type f '!' -perm 0644 \
-exec chmod -v 0644 2>/dev/null {} + || die "failed to fix font files perms"
if [[ -z ${ROOT%/} ]] ; then
if has_version media-libs/fontconfig ; then
ebegin "Updating global fontcache"
fc-cache -fs
if ! eend $? ; then
die "failed to update global fontcache"
fi
else
einfo "Skipping fontcache update (media-libs/fontconfig not installed)"
fi
else
einfo "Skipping fontcache update (ROOT != /)"
fi
}
# @FUNCTION: font_pkg_postinst
# @DESCRIPTION:
# The font pkg_postinst function.
font_pkg_postinst() {
if [[ -n ${FONT_CONF[@]} ]]; then
local conffile
elog "The following fontconfig configuration files have been installed:"
elog
for conffile in "${FONT_CONF[@]}"; do
[[ -e "${EROOT%/}"/etc/fonts/conf.avail/${conffile##*/} ]] &&
elog " ${conffile##*/}"
done
elog
elog "Use \`eselect fontconfig\` to enable/disable them."
fi
_update_fontcache
}
# @FUNCTION: font_pkg_postrm
# @DESCRIPTION:
# The font pkg_postrm function.
font_pkg_postrm() {
font_cleanup_dirs
_update_fontcache
}
_FONT_ECLASS=1
fi

286
eclass/fortran-2.eclass Normal file
View File

@ -0,0 +1,286 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: fortran-2.eclass
# @MAINTAINER:
# sci@gentoo.org
# @AUTHOR:
# Author Justin Lecher <jlec@gentoo.org>
# Test functions provided by Sebastien Fabbro and Kacper Kowalik
# @SUPPORTED_EAPIS: 4 5 6 7
# @BLURB: Simplify fortran compiler management
# @DESCRIPTION:
# If you need a fortran compiler, then you should be inheriting this eclass.
# In case you only need optional support, please export FORTRAN_NEEDED before
# inheriting the eclass.
#
# The eclass tests for working fortran compilers
# and exports the variables FC and F77.
# Optionally, it checks for extended capabilities based on
# the variable options selected in the ebuild
# The only phase function exported is fortran-2_pkg_setup.
# @EXAMPLE:
# FORTRAN_NEEDED="lapack fortran"
#
# inherit fortran-2
#
# FORTRAN_NEED_OPENMP=1
inherit toolchain-funcs
case ${EAPI:-0} in
# not used in the eclass, but left for backward compatibility with legacy users
4|5|6) inherit eutils ;;
7) ;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
EXPORT_FUNCTIONS pkg_setup
if [[ ! ${_FORTRAN_2_ECLASS} ]]; then
# @ECLASS-VARIABLE: FORTRAN_NEED_OPENMP
# @DESCRIPTION:
# Set to "1" in order to automatically have the eclass abort if the fortran
# compiler lacks openmp support.
: ${FORTRAN_NEED_OPENMP:=0}
# @ECLASS-VARIABLE: FORTRAN_STANDARD
# @DESCRIPTION:
# Set this, if a special dialect needs to be supported.
# Generally not needed as default is sufficient.
#
# Valid settings are any combination of: 77 90 95 2003
: ${FORTRAN_STANDARD:=77}
# @ECLASS-VARIABLE: FORTRAN_NEEDED
# @DESCRIPTION:
# If your package has an optional fortran support, set this variable
# to the space separated list of USE triggering the fortran
# dependency.
#
# e.g. FORTRAN_NEEDED=lapack would result in
#
# DEPEND="lapack? ( virtual/fortran )"
#
# If unset, we always depend on virtual/fortran.
: ${FORTRAN_NEEDED:=always}
for _f_use in ${FORTRAN_NEEDED}; do
case ${_f_use} in
always)
DEPEND+=" virtual/fortran"
RDEPEND+=" virtual/fortran"
break
;;
no)
break
;;
test)
DEPEND+=" ${_f_use}? ( virtual/fortran )"
;;
*)
DEPEND+=" ${_f_use}? ( virtual/fortran )"
RDEPEND+=" ${_f_use}? ( virtual/fortran )"
;;
esac
done
unset _f_use
# @FUNCTION: fortran_int64_abi_fflags
# @DESCRIPTION:
# Return the Fortran compiler flag to enable 64 bit integers for
# array indices
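# Example of a possible use in an ebuild (a sketch only):
# @CODE
# FCFLAGS+=" $(fortran_int64_abi_fflags)"
# @CODE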
fortran_int64_abi_fflags() {
debug-print-function ${FUNCNAME} "${@}"
local _FC=$(tc-getFC)
if [[ ${_FC} == *gfortran* ]]; then
echo "-fdefault-integer-8"
elif [[ ${_FC} == ifort ]]; then
echo "-integer-size 64"
else
die "Compiler flag for 64bit interger for ${_FC} unknown"
fi
}
# @FUNCTION: _fortran_write_testsuite
# @INTERNAL
# @DESCRIPTION:
# writes fortran test code
_fortran_write_testsuite() {
debug-print-function ${FUNCNAME} "${@}"
local filebase=${T}/test-fortran
# f77 code
cat <<- EOF > "${filebase}.f" || die
end
EOF
# f90/95 code
cat <<- EOF > "${filebase}.f90" || die
end
EOF
# f2003 code
cat <<- EOF > "${filebase}.f03" || die
procedure(), pointer :: p
end
EOF
}
# @FUNCTION: _fortran_compile_test
# @USAGE: <compiler> [dialect]
# @INTERNAL
# @DESCRIPTION:
# Takes fortran compiler as first argument and dialect as second.
# Checks whether the passed fortran compiler speaks the fortran dialect
_fortran_compile_test() {
debug-print-function ${FUNCNAME} "${@}"
local filebase=${T}/test-fortran
local fcomp=${1}
local fdia=${2}
local fcode=${filebase}.f${fdia}
local ret
[[ $# -lt 1 ]] && \
die "_fortran_compile_test() needs at least one argument"
[[ -f ${fcode} ]] || _fortran_write_testsuite
${fcomp} "${fcode}" -o "${fcode}.x" \
>> "${T}"/_fortran_compile_test.log 2>&1
ret=$?
rm -f "${fcode}.x"
return ${ret}
}
# @FUNCTION: _fortran-has-openmp
# @RETURN: return code of the compiler
# @INTERNAL
# @DESCRIPTION:
# See if the fortran compiler supports OpenMP.
_fortran-has-openmp() {
debug-print-function ${FUNCNAME} "${@}"
local flag
local filebase=${T}/test-fc-openmp
local fcode=${filebase}.f
local ret
local _fc=$(tc-getFC)
cat <<- EOF > "${fcode}" || die
call omp_get_num_threads
end
EOF
for flag in -fopenmp -xopenmp -openmp -mp -omp -qsmp=omp; do
${_fc} ${flag} "${fcode}" -o "${fcode}.x" \
&>> "${T}"/_fortran_compile_test.log
ret=$?
[[ ${ret} == 0 ]] && break
done
rm -f "${fcode}.x"
return ${ret}
}
# @FUNCTION: _fortran_die_msg
# @INTERNAL
# @DESCRIPTION:
# Detailed description how to handle fortran support
_fortran_die_msg() {
debug-print-function ${FUNCNAME} "${@}"
eerror
eerror "Please install currently selected gcc version with USE=fortran."
eerror "If you intend to use a different compiler then gfortran, please"
eerror "set FC variable accordingly and take care that the necessary"
eerror "fortran dialects are supported."
eerror
die "Currently no working fortran compiler is available (see ${T}/_fortran_compile_test.log for information)"
}
# @FUNCTION: _fortran_test_function
# @INTERNAL
# @DESCRIPTION:
# Internal test function for working fortran compiler.
# It is called in fortran-2_pkg_setup.
_fortran_test_function() {
debug-print-function ${FUNCNAME} "${@}"
local dialect
: ${F77:=$(tc-getFC)}
: ${FORTRAN_STANDARD:=77}
for dialect in ${FORTRAN_STANDARD}; do
case ${dialect} in
77) _fortran_compile_test $(tc-getF77) || \
_fortran_die_msg ;;
90|95) _fortran_compile_test $(tc-getFC) 90 || \
_fortran_die_msg ;;
2003) _fortran_compile_test $(tc-getFC) 03 || \
_fortran_die_msg ;;
2008) die "Future" ;;
*) die "${dialect} is not a Fortran dialect." ;;
esac
done
tc-export F77 FC
einfo "Using following Fortran compiler:"
einfo " F77: ${F77}"
einfo " FC: ${FC}"
if [[ ${FORTRAN_NEED_OPENMP} == 1 ]]; then
if _fortran-has-openmp; then
einfo "${FC} has OPENMP support"
else
die "Please install current gcc with USE=openmp or set the FC variable to a compiler that supports OpenMP"
fi
fi
}
# @FUNCTION: _fortran-2_pkg_setup
# @INTERNAL
# @DESCRIPTION:
# _The_ fortran-2_pkg_setup() code
_fortran-2_pkg_setup() {
for _f_use in ${FORTRAN_NEEDED}; do
case ${_f_use} in
always)
_fortran_test_function && break 2
;;
no)
einfo "Forcing fortran support off"
break
;;
*)
if use ${_f_use}; then
_fortran_test_function && break 2
else
unset FC
unset F77
fi
;;
esac
done
}
# @FUNCTION: fortran-2_pkg_setup
# @DESCRIPTION:
# Setup functionality:
# checks for a valid fortran compiler and optionally for its OpenMP support.
fortran-2_pkg_setup() {
debug-print-function ${FUNCNAME} "${@}"
if [[ ${MERGE_TYPE} != binary ]]; then
_fortran-2_pkg_setup
fi
}
_FORTRAN_2_ECLASS=1
fi

52
eclass/freedict.eclass Normal file
View File

@ -0,0 +1,52 @@
# Copyright 1999-2018 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: freedict.eclass
# @MAINTAINER:
# maintainer-needed@gentoo.org
# @AUTHOR:
# Original author: Seemant Kulleen
# @SUPPORTED_EAPIS: 6
# @BLURB: Ease the installation of freedict translation dictionaries
# @DESCRIPTION:
# This eclass exists to ease the installation of freedict translation
# dictionaries. The only variables which need to be defined in the actual
# ebuilds are FORLANG and TOLANG for the source and target languages,
# respectively.
# @ECLASS-VARIABLE: FORLANG
# @DESCRIPTION:
# Please see above for a description.
# @ECLASS-VARIABLE: TOLANG
# @DESCRIPTION:
# Please see above for a description.
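# Example ebuild snippet (the language names are hypothetical); note that both
# variables have to be set before the inherit, as they are used to build
# DESCRIPTION:
# @CODE
# FORLANG="English"
# TOLANG="German"
# inherit freedict
# @CODE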
case ${EAPI:-0} in
6) ;;
*) die "${ECLASS}.eclass is banned in EAPI=${EAPI}" ;;
esac
MY_P=${PN/freedict-/}
DESCRIPTION="Freedict for language translation from ${FORLANG} to ${TOLANG}"
HOMEPAGE="http://freedict.sourceforge.net/"
SRC_URI="http://freedict.sourceforge.net/download/linux/${MY_P}.tar.gz"
LICENSE="GPL-2+"
SLOT="0"
RDEPEND="app-text/dictd"
S="${WORKDIR}"
# @FUNCTION: freedict_src_install
# @DESCRIPTION:
# The freedict src_install function, which is exported
freedict_src_install() {
insinto /usr/$(get_libdir)/dict
doins ${MY_P}.dict.dz
doins ${MY_P}.index
}
EXPORT_FUNCTIONS src_install

397
eclass/games.eclass Normal file
View File

@ -0,0 +1,397 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: games.eclass
# @MAINTAINER:
# Games team <games@gentoo.org>
# @SUPPORTED_EAPIS: 0 1 2 3 4 5
# @BLURB: Standardizing the install of games.
# @DEPRECATED: none
# @DESCRIPTION:
# This eclass makes sure that games are consistently handled in gentoo.
# It installs game files by default in FHS-compatible directories
# like /usr/share/games and sets more restrictive permissions in order
# to avoid some security bugs.
#
# The installation directories, as well as the user and group the files
# are installed as, can be controlled by the user. See the variables like
# GAMES_BINDIR, GAMES_USER etc. below. These are NOT supposed to be set
# by ebuilds!
#
# For a general guide on writing games ebuilds, see:
# https://wiki.gentoo.org/wiki/Project:Games/Ebuild_howto
#
# WARNING: This eclass is DEPRECATED and must not be used by new games
# ebuilds, bug #574082. When writing game ebuilds, no specific eclass
# is needed. For more details, see the QA team policies page:
# https://wiki.gentoo.org/wiki/Project:Quality_Assurance/Policies#Games
if [[ -z ${_GAMES_ECLASS} ]]; then
_GAMES_ECLASS=1
inherit base multilib toolchain-funcs eutils user
case ${EAPI:-0} in
0|1) EXPORT_FUNCTIONS pkg_setup src_compile pkg_preinst pkg_postinst ;;
2|3|4|5) EXPORT_FUNCTIONS pkg_setup src_configure src_compile pkg_preinst pkg_postinst ;;
*) die "games.eclass is banned in EAPI=${EAPI}, see https://wiki.gentoo.org/wiki/Project:Quality_Assurance/Policies#Games" ;;
esac
if [[ ${CATEGORY}/${PN} != "games-misc/games-envd" ]] ; then
# environment file
RDEPEND="games-misc/games-envd"
fi
# @ECLASS-VARIABLE: GAMES_PREFIX
# @DESCRIPTION:
# Prefix where to install games, mostly used by GAMES_BINDIR. Games data should
# still go into GAMES_DATADIR. May be set by the user.
GAMES_PREFIX=${GAMES_PREFIX:-/usr/games}
# @ECLASS-VARIABLE: GAMES_PREFIX_OPT
# @DESCRIPTION:
# Prefix where to install precompiled/blob games, usually followed by
# package name. May be set by the user.
GAMES_PREFIX_OPT=${GAMES_PREFIX_OPT:-/opt}
# @ECLASS-VARIABLE: GAMES_DATADIR
# @DESCRIPTION:
# Base directory where to install game data files, usually followed by
# package name. May be set by the user.
GAMES_DATADIR=${GAMES_DATADIR:-/usr/share/games}
# @ECLASS-VARIABLE: GAMES_DATADIR_BASE
# @DESCRIPTION:
# Similar to GAMES_DATADIR, but only used when a package auto appends 'games'
# to the path. May be set by the user.
GAMES_DATADIR_BASE=${GAMES_DATADIR_BASE:-/usr/share}
# @ECLASS-VARIABLE: GAMES_SYSCONFDIR
# @DESCRIPTION:
# Where to install global games configuration files, usually followed by
# package name. May be set by the user.
GAMES_SYSCONFDIR=${GAMES_SYSCONFDIR:-/etc/games}
# @ECLASS-VARIABLE: GAMES_STATEDIR
# @DESCRIPTION:
# Where to install/store global variable game data, usually followed by
# package name. May be set by the user.
GAMES_STATEDIR=${GAMES_STATEDIR:-/var/games}
# @ECLASS-VARIABLE: GAMES_LOGDIR
# @DESCRIPTION:
# Where to store global game log files, usually followed by
# package name. May be set by the user.
GAMES_LOGDIR=${GAMES_LOGDIR:-/var/log/games}
# @ECLASS-VARIABLE: GAMES_BINDIR
# @DESCRIPTION:
# Where to install the game binaries. May be set by the user. This is in PATH.
GAMES_BINDIR=${GAMES_BINDIR:-${GAMES_PREFIX}/bin}
# @ECLASS-VARIABLE: GAMES_ENVD
# @INTERNAL
# @DESCRIPTION:
# The games environment file name which sets games specific LDPATH and PATH.
GAMES_ENVD="90games"
# @ECLASS-VARIABLE: GAMES_USER
# @DESCRIPTION:
# The USER who owns all game files and usually has write permissions.
# May be set by the user.
GAMES_USER=${GAMES_USER:-root}
# @ECLASS-VARIABLE: GAMES_USER_DED
# @DESCRIPTION:
# The USER who owns all game files related to the dedicated server part
# of a package. May be set by the user.
GAMES_USER_DED=${GAMES_USER_DED:-games}
# @ECLASS-VARIABLE: GAMES_GROUP
# @DESCRIPTION:
# The GROUP that owns all game files and usually does not have
# write permissions. May be set by the user.
# If you want games world-executable, then you can at least set this variable
# to 'users' which is almost the same.
GAMES_GROUP=${GAMES_GROUP:-games}
# @FUNCTION: games_get_libdir
# @DESCRIPTION:
# Gets the directory where to install games libraries. This is in LDPATH.
games_get_libdir() {
echo ${GAMES_PREFIX}/$(get_libdir)
}
# @FUNCTION: egamesconf
# @USAGE: [<args>...]
# @DESCRIPTION:
# Games equivalent to 'econf' for autotools based build systems. It passes
# the necessary games specific directories automatically.
egamesconf() {
# handle verbose build log pre-EAPI5
local _gamesconf
if has "${EAPI:-0}" 0 1 2 3 4 ; then
if grep -q -s disable-silent-rules "${ECONF_SOURCE:-.}"/configure ; then
_gamesconf="--disable-silent-rules"
fi
fi
# bug 493954
if grep -q -s datarootdir "${ECONF_SOURCE:-.}"/configure ; then
_gamesconf="${_gamesconf} --datarootdir=/usr/share"
fi
econf \
--prefix="${GAMES_PREFIX}" \
--libdir="$(games_get_libdir)" \
--datadir="${GAMES_DATADIR}" \
--sysconfdir="${GAMES_SYSCONFDIR}" \
--localstatedir="${GAMES_STATEDIR}" \
${_gamesconf} \
"$@"
}
# @FUNCTION: gameswrapper
# @USAGE: <command> [<args>...]
# @INTERNAL
# @DESCRIPTION:
# Wraps an install command like dobin, dolib etc, so that
# it has GAMES_PREFIX as prefix.
gameswrapper() {
# don't want to pollute the calling env
(
into "${GAMES_PREFIX}"
cmd=$1
shift
${cmd} "$@"
)
}
# @FUNCTION: dogamesbin
# @USAGE: <path>...
# @DESCRIPTION:
# Install one or more games binaries.
dogamesbin() { gameswrapper ${FUNCNAME/games} "$@"; }
# @FUNCTION: dogamessbin
# @USAGE: <path>...
# @DESCRIPTION:
# Install one or more games system binaries.
dogamessbin() { gameswrapper ${FUNCNAME/games} "$@"; }
# @FUNCTION: dogameslib
# @USAGE: <path>...
# @DESCRIPTION:
# Install one or more games libraries.
dogameslib() { gameswrapper ${FUNCNAME/games} "$@"; }
# @FUNCTION: dogameslib.a
# @USAGE: <path>...
# @DESCRIPTION:
# Install one or more static games libraries.
dogameslib.a() { gameswrapper ${FUNCNAME/games} "$@"; }
# @FUNCTION: dogameslib.so
# @USAGE: <path>...
# @DESCRIPTION:
# Install one or more shared games libraries.
dogameslib.so() { gameswrapper ${FUNCNAME/games} "$@"; }
# @FUNCTION: newgamesbin
# @USAGE: <path> <newname>
# @DESCRIPTION:
# Install one games binary with a new name.
newgamesbin() { gameswrapper ${FUNCNAME/games} "$@"; }
# @FUNCTION: newgamessbin
# @USAGE: <path> <newname>
# @DESCRIPTION:
# Install one system games binary with a new name.
newgamessbin() { gameswrapper ${FUNCNAME/games} "$@"; }
# @FUNCTION: games_make_wrapper
# @USAGE: <wrapper> <target> [chdir] [libpaths] [installpath]
# @DESCRIPTION:
# Create a shell wrapper script named wrapper in installpath
# (defaults to the games bindir) to execute target (default of wrapper) by
# first optionally setting LD_LIBRARY_PATH to the colon-delimited
# libpaths followed by optionally changing directory to chdir.
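# Example (the wrapper name and paths are hypothetical):
# @CODE
# games_make_wrapper ${PN} ./${PN}-bin "${GAMES_PREFIX_OPT}/${PN}"
# @CODE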
games_make_wrapper() { gameswrapper ${FUNCNAME/games_} "$@"; }
# @FUNCTION: gamesowners
# @USAGE: [<args excluding owner/group>...] <path>...
# @DESCRIPTION:
# Run 'chown' with the given args on the given files. Owner and
# group are GAMES_USER and GAMES_GROUP and must not be passed
# as args.
gamesowners() { chown ${GAMES_USER}:${GAMES_GROUP} "$@"; }
# @FUNCTION: gamesperms
# @USAGE: <path>...
# @DESCRIPTION:
# Run 'chmod' with games specific permissions on the given files.
gamesperms() { chmod u+rw,g+r-w,o-rwx "$@"; }
# @FUNCTION: prepgamesdirs
# @DESCRIPTION:
# Fix all permissions/owners of files in games related directories,
# usually called at the end of src_install().
prepgamesdirs() {
local dir f mode
for dir in \
"${GAMES_PREFIX}" "${GAMES_PREFIX_OPT}" "${GAMES_DATADIR}" \
"${GAMES_SYSCONFDIR}" "${GAMES_STATEDIR}" "$(games_get_libdir)" \
"${GAMES_BINDIR}" "$@"
do
[[ ! -d ${D}/${dir} ]] && continue
(
gamesowners -R "${D}/${dir}"
find "${D}/${dir}" -type d -print0 | xargs -0 chmod 750
mode=o-rwx,g+r,g-w
[[ ${dir} = ${GAMES_STATEDIR} ]] && mode=o-rwx,g+r
find "${D}/${dir}" -type f -print0 | xargs -0 chmod $mode
# common trees should not be games owned #264872 #537580
fowners root:0 "${dir}"
fperms 755 "${dir}"
if [[ ${dir} == "${GAMES_PREFIX}" \
|| ${dir} == "${GAMES_PREFIX_OPT}" ]] ; then
for d in $(get_libdir) bin ; do
# check if dirs exist to avoid "nonfatal" option
if [[ -e ${D}/${dir}/${d} ]] ; then
fowners root:0 "${dir}/${d}"
fperms 755 "${dir}/${d}"
fi
done
fi
) &>/dev/null
f=$(find "${D}/${dir}" -perm +4000 -a -uid 0 2>/dev/null)
if [[ -n ${f} ]] ; then
eerror "A game was detected that is setuid root!"
eerror "${f}"
die "refusing to merge a setuid root game"
fi
done
[[ -d ${D}/${GAMES_BINDIR} ]] || return 0
find "${D}/${GAMES_BINDIR}" -maxdepth 1 -type f -exec chmod 750 '{}' \;
}
# @FUNCTION: games_pkg_setup
# @DESCRIPTION:
# Export some toolchain specific variables and create games related groups
# and users. This function is exported as pkg_setup().
games_pkg_setup() {
tc-export CC CXX LD AR RANLIB
enewgroup "${GAMES_GROUP}" 35
[[ ${GAMES_USER} != "root" ]] \
&& enewuser "${GAMES_USER}" 35 -1 "${GAMES_PREFIX}" "${GAMES_GROUP}"
[[ ${GAMES_USER_DED} != "root" ]] \
&& enewuser "${GAMES_USER_DED}" 36 /bin/bash "${GAMES_PREFIX}" "${GAMES_GROUP}"
# Dear portage team, we are so sorry. Lots of love, games team.
# See Bug #61680
[[ ${USERLAND} != "GNU" ]] && return 0
[[ $(egetshell "${GAMES_USER_DED}") == "/bin/false" ]] \
&& usermod -s /bin/bash "${GAMES_USER_DED}"
}
# @FUNCTION: games_src_configure
# @DESCRIPTION:
# Runs egamesconf if there is a configure file.
# This function is exported as src_configure().
games_src_configure() {
[[ -x "${ECONF_SOURCE:-.}"/configure ]] && egamesconf
}
# @FUNCTION: games_src_compile
# @DESCRIPTION:
# Runs base_src_make(). This function is exported as src_compile().
games_src_compile() {
case ${EAPI:-0} in
0|1) games_src_configure ;;
esac
base_src_make
}
# @FUNCTION: games_pkg_preinst
# @DESCRIPTION:
# Synchronizes GAMES_STATEDIR of the ebuild image with the live filesystem.
games_pkg_preinst() {
local f
while read f ; do
if [[ -e ${ROOT}/${GAMES_STATEDIR}/${f} ]] ; then
cp -p \
"${ROOT}/${GAMES_STATEDIR}/${f}" \
"${D}/${GAMES_STATEDIR}/${f}" \
|| die "cp failed"
# make the date match the rest of the install
touch "${D}/${GAMES_STATEDIR}/${f}"
fi
done < <(find "${D}/${GAMES_STATEDIR}" -type f -printf '%P\n' 2>/dev/null)
}
# @FUNCTION: games_pkg_postinst
# @DESCRIPTION:
# Prints some warnings and infos, also related to games groups.
games_pkg_postinst() {
if [[ -z "${GAMES_SHOW_WARNING}" ]] ; then
ewarn "Remember, in order to play games, you have to"
ewarn "be in the '${GAMES_GROUP}' group."
echo
case ${CHOST} in
*-darwin*) ewarn "Just run 'niutil -appendprop / /groups/games users <USER>'";;
*-freebsd*|*-dragonfly*) ewarn "Just run 'pw groupmod ${GAMES_GROUP} -m <USER>'";;
*) ewarn "Just run 'gpasswd -a <USER> ${GAMES_GROUP}', then have <USER> re-login.";;
esac
echo
einfo "For more info about Gentoo gaming in general, see our website:"
einfo " https://games.gentoo.org/"
echo
fi
}
# @FUNCTION: games_ut_unpack
# @USAGE: <directory or file to unpack>
# @DESCRIPTION:
# Unpack .uz2 files for UT2003/UT2004.
games_ut_unpack() {
local ut_unpack="$1"
local f=
if [[ -z ${ut_unpack} ]] ; then
die "You must provide an argument to games_ut_unpack"
fi
if [[ -f ${ut_unpack} ]] ; then
uz2unpack "${ut_unpack}" "${ut_unpack%.uz2}" \
|| die "uncompressing file ${ut_unpack}"
fi
if [[ -d ${ut_unpack} ]] ; then
while read f ; do
uz2unpack "${ut_unpack}/${f}" "${ut_unpack}/${f%.uz2}" \
|| die "uncompressing file ${f}"
rm -f "${ut_unpack}/${f}" || die "deleting compressed file ${f}"
done < <(find "${ut_unpack}" -maxdepth 1 -name '*.uz2' -printf '%f\n' 2>/dev/null)
fi
}
# @FUNCTION: games_umod_unpack
# @USAGE: <file to unpack>
# @DESCRIPTION:
# Unpacks .umod/.ut2mod/.ut4mod files for UT/UT2003/UT2004.
# Don't forget to set 'dir' and 'Ddir'.
games_umod_unpack() {
local umod=$1
mkdir -p "${Ddir}"/System
cp "${dir}"/System/{ucc-bin,{Manifest,Def{ault,User}}.ini,{Engine,Core,zlib,ogg,vorbis}.so,{Engine,Core}.int} "${Ddir}"/System
cd "${Ddir}"/System
UT_DATA_PATH=${Ddir}/System ./ucc-bin umodunpack -x "${S}/${umod}" -nohomedir &> /dev/null \
|| die "uncompressing file ${umod}"
rm -f "${Ddir}"/System/{ucc-bin,{Manifest,Def{ault,User},User,UT200{3,4}}.ini,{Engine,Core,zlib,ogg,vorbis}.so,{Engine,Core}.int,ucc.log} &>/dev/null \
|| die "Removing temporary files"
}
fi

368
eclass/ghc-package.eclass Normal file
View File

@ -0,0 +1,368 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ghc-package.eclass
# @MAINTAINER:
# "Gentoo's Haskell Language team" <haskell@gentoo.org>
# @AUTHOR:
# Original Author: Andres Loeh <kosmikus@gentoo.org>
# @BLURB: This eclass helps with the Glasgow Haskell Compiler's package configuration utility.
# @DESCRIPTION:
# Helper eclass to handle ghc installation/upgrade/deinstallation process.
inherit multiprocessing
# Maintain version-testing compatibility with ebuilds not using EAPI 7.
case "${EAPI:-0}" in
4|5|6) inherit eapi7-ver ;;
*) ;;
esac
# GHC uses its own native code generator. Portage's
# QA check generates false positive because it assumes
# presence of GCC-specific sections.
#
# Work around the false positive by disabling the check completely.
# bug #722078, bug #677600
QA_FLAGS_IGNORED='.*'
# @FUNCTION: ghc-getghc
# @DESCRIPTION:
# returns the name of the ghc executable
ghc-getghc() {
if ! type -P ${HC:-ghc}; then
ewarn "ghc not found"
type -P false
fi
}
# @FUNCTION: ghc-getghcpkg
# @DESCRIPTION:
# Internal function that returns the name of the ghc-pkg executable
ghc-getghcpkg() {
if ! type -P ${HC_PKG:-ghc-pkg}; then
ewarn "ghc-pkg not found"
type -P false
fi
}
# @FUNCTION: ghc-getghcpkgbin
# @DESCRIPTION:
# returns the name of the ghc-pkg binary (ghc-pkg
# itself usually is a shell script, and we have to
# bypass the script under certain circumstances);
# for Cabal, we add an empty global package config file,
# because for some reason the global package file
# must be specified
ghc-getghcpkgbin() {
if ver_test "$(ghc-version)" -ge "7.9.20141222"; then
# ghc-7.10 stopped supporting single-file database
local empty_db="${T}/empty.conf.d" ghc_pkg="$(ghc-libdir)/bin/ghc-pkg"
if [[ ! -d ${empty_db} ]]; then
"${ghc_pkg}" init "${empty_db}" || die "Failed to initialize empty global db"
fi
echo "$(ghc-libdir)/bin/ghc-pkg" "--global-package-db=${empty_db}"
elif ver_test "$(ghc-version)" -ge "7.7.20121101"; then
# the ghc-pkg executable changed name in ghc 6.10, as it no longer needs
# the wrapper script with the static flags
# was moved to bin/ subtree by:
# http://www.haskell.org/pipermail/cvs-ghc/2012-September/076546.html
echo '[]' > "${T}/empty.conf"
echo "$(ghc-libdir)/bin/ghc-pkg" "--global-package-db=${T}/empty.conf"
elif ver_test "$(ghc-version)" -ge "7.5.20120516"; then
echo '[]' > "${T}/empty.conf"
echo "$(ghc-libdir)/ghc-pkg" "--global-package-db=${T}/empty.conf"
else
echo '[]' > "${T}/empty.conf"
echo "$(ghc-libdir)/ghc-pkg" "--global-conf=${T}/empty.conf"
fi
}
# @FUNCTION: ghc-version
# @DESCRIPTION:
# returns upstream version of ghc
# as reported by '--numeric-version'
# Examples: "7.10.2", "7.9.20141222"
_GHC_VERSION_CACHE=""
ghc-version() {
if [[ -z "${_GHC_VERSION_CACHE}" ]]; then
_GHC_VERSION_CACHE="$($(ghc-getghc) --numeric-version)"
fi
echo "${_GHC_VERSION_CACHE}"
}
# @FUNCTION: ghc-pm-version
# @DESCRIPTION:
# returns the package manager (PM) version of ghc
# as reported by '$(best_version)'
# Examples: "PM:7.10.2", "PM:7.10.2_rc1", "PM:7.8.4-r4"
_GHC_PM_VERSION_CACHE=""
ghc-pm-version() {
local pm_ghc_p
if [[ -z "${_GHC_PM_VERSION_CACHE}" ]]; then
pm_ghc_p=$(best_version dev-lang/ghc)
_GHC_PM_VERSION_CACHE="PM:${pm_ghc_p#dev-lang/ghc-}"
fi
echo "${_GHC_PM_VERSION_CACHE}"
}
# @FUNCTION: ghc-cabal-version
# @DESCRIPTION:
# return version of the Cabal library bundled with ghc
ghc-cabal-version() {
if ver_test "$(ghc-version)" -ge "7.9.20141222"; then
# outputs in format: 'version: 1.18.1.5'
set -- `$(ghc-getghcpkg) --package-db=$(ghc-libdir)/package.conf.d.initial field Cabal version`
echo "$2"
else
local cabal_package=`echo "$(ghc-libdir)"/Cabal-*`
# /path/to/ghc/Cabal-${VER} -> ${VER}
echo "${cabal_package/*Cabal-/}"
fi
}
# @FUNCTION: ghc-is-dynamic
# @DESCRIPTION:
# checks if ghc is built against dynamic libraries
# binaries linked against GHC library (and using plugin loading)
# have to be linked the same way:
# https://ghc.haskell.org/trac/ghc/ticket/10301
ghc-is-dynamic() {
$(ghc-getghc) --info | grep "GHC Dynamic" | grep -q "YES"
}
# @FUNCTION: ghc-supports-shared-libraries
# @DESCRIPTION:
# checks if ghc is built with support for building
# shared libraries (aka '-dynamic' option)
ghc-supports-shared-libraries() {
$(ghc-getghc) --info | grep "RTS ways" | grep -q "dyn"
}
# @FUNCTION: ghc-supports-threaded-runtime
# @DESCRIPTION:
# checks if ghc is built with support for threaded
# runtime (aka '-threaded' option)
ghc-supports-threaded-runtime() {
$(ghc-getghc) --info | grep "RTS ways" | grep -q "thr"
}
# @FUNCTION: ghc-supports-smp
# @DESCRIPTION:
# checks if ghc is built with support for multiple cores runtime
ghc-supports-smp() {
$(ghc-getghc) --info | grep "Support SMP" | grep -q "YES"
}
# @FUNCTION: ghc-supports-interpreter
# @DESCRIPTION:
# checks if ghc has interpreter mode (aka GHCi)
# It usually means that ghc supports for template haskell.
ghc-supports-interpreter() {
$(ghc-getghc) --info | grep "Have interpreter" | grep -q "YES"
}
# @FUNCTION: ghc-supports-parallel-make
# @DESCRIPTION:
# checks if ghc has support for '--make -j' mode
# The option was introduced in ghc-7.8-rc1.
ghc-supports-parallel-make() {
$(ghc-getghc) --info | grep "Support parallel --make" | grep -q "YES"
}
# @FUNCTION: ghc-extractportageversion
# @DESCRIPTION:
# extract the version of a portage-installed package
ghc-extractportageversion() {
local pkg
local version
pkg="$(best_version $1)"
version="${pkg#$1-}"
version="${version%-r*}"
version="${version%_pre*}"
echo "${version}"
}
# @FUNCTION: ghc-libdir
# @DESCRIPTION:
# returns the library directory
_GHC_LIBDIR_CACHE=""
ghc-libdir() {
if [[ -z "${_GHC_LIBDIR_CACHE}" ]]; then
_GHC_LIBDIR_CACHE="$($(ghc-getghc) --print-libdir)"
fi
echo "${_GHC_LIBDIR_CACHE}"
}
# @FUNCTION: ghc-make-args
# @DESCRIPTION:
# Returns default arguments passed along 'ghc --make'
# build mode. Used mainly to enable parallel build mode.
ghc-make-args() {
local ghc_make_args=()
# parallel on all available cores
if ghc-supports-smp && ghc-supports-parallel-make; then
# It should have been just -j$(makeopts_jobs)
# but GHC does not yet have nice defaults:
# https://ghc.haskell.org/trac/ghc/ticket/9221#comment:57
# SMP is a requirement for parallel GC's gen0
# 'qb' balancing.
echo "-j$(makeopts_jobs) +RTS -A256M -qb0 -RTS"
ghc_make_args=()
fi
echo "${ghc_make_args[@]}"
}
# @FUNCTION: ghc-confdir
# @DESCRIPTION:
# returns the (Gentoo) library configuration directory, we
# store here a hint for 'haskell-updater' about packages
# installed for old ghc versions and current ones.
ghc-confdir() {
echo "$(ghc-libdir)/gentoo"
}
# @FUNCTION: ghc-package-db
# @DESCRIPTION:
# returns the global package database directory
ghc-package-db() {
echo "$(ghc-libdir)/package.conf.d"
}
# @FUNCTION: ghc-localpkgconfd
# @DESCRIPTION:
# returns the name of the local (package-specific)
# package configuration file
ghc-localpkgconfd() {
echo "${PF}.conf.d"
}
# @FUNCTION: ghc-package-exists
# @DESCRIPTION:
# tests if a ghc package exists
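# Example (the package name and version are hypothetical):
# @CODE
# ghc-package-exists "mtl-2.2.2" && einfo "mtl is registered"
# @CODE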
ghc-package-exists() {
$(ghc-getghcpkg) describe "$1" > /dev/null 2>&1
}
# @FUNCTION: check-for-collisions
# @DESCRIPTION:
# makes sure no packages
# have the same version as initial package setup
check-for-collisions() {
local localpkgconf=$1
local checked_pkg
local initial_pkg_db="$(ghc-libdir)/package.conf.d.initial"
for checked_pkg in `$(ghc-getghcpkgbin) -f "${localpkgconf}" list --simple-output`
do
# should return empty output
local collided=`$(ghc-getghcpkgbin) -f ${initial_pkg_db} list --simple-output "${checked_pkg}"`
if [[ -n ${collided} ]]; then
eerror "Cabal package '${checked_pkg}' is shipped with '$(ghc-pm-version)' ('$(ghc-version)')."
eerror "Ebuild author forgot an entry in CABAL_CORE_LIB_GHC_PV='${CABAL_CORE_LIB_GHC_PV}'."
eerror "Found in ${initial_pkg_db}."
die
fi
done
}
# @FUNCTION: ghc-install-pkg
# @DESCRIPTION:
# moves the local (package-specific) package configuration
# file to its final destination
ghc-install-pkg() {
local localpkgconf="${T}/$(ghc-localpkgconfd)"
local pkg_path pkg pkg_db="${D}/$(ghc-package-db)" hint_db="${D}/$(ghc-confdir)"
$(ghc-getghcpkgbin) init "${localpkgconf}" || die "Failed to initialize empty local db"
for pkg_config_file in "$@"; do
$(ghc-getghcpkgbin) -f "${localpkgconf}" update - --force \
< "${pkg_config_file}" || die "failed to register ${pkg}"
done
check-for-collisions "${localpkgconf}"
mkdir -p "${pkg_db}" || die
for pkg_path in "${localpkgconf}"/*.conf; do
pkg=$(basename "${pkg_path}")
cp "${pkg_path}" "${pkg_db}/${pkg}" || die
done
mkdir -p "${hint_db}" || die
for pkg_config_file in "$@"; do
local pkg_name="gentoo-${CATEGORY}-${PF}-"$(basename "${pkg_config_file}")
cp "${pkg_config_file}" "${hint_db}/${pkg_name}" || die
chmod 0644 "${hint_db}/${pkg_name}" || die
done
}
# @FUNCTION: ghc-recache-db
# @DESCRIPTION:
# updates 'package.cache' binary cache for registered '*.conf'
# packages
ghc-recache-db() {
einfo "Recaching GHC package DB"
$(ghc-getghcpkg) recache
}
# @FUNCTION: ghc-register-pkg
# @DESCRIPTION:
# registers all packages in the local (package-specific)
# package configuration file
ghc-register-pkg() {
ghc-recache-db
}
# @FUNCTION: ghc-reregister
# @DESCRIPTION:
# re-adds all available .conf files to the global
# package conf file, to be used on a ghc reinstallation
ghc-reregister() {
ghc-recache-db
}
# @FUNCTION: ghc-unregister-pkg
# @DESCRIPTION:
# unregisters a package configuration file
ghc-unregister-pkg() {
ghc-recache-db
}
# @FUNCTION: ghc-pkgdeps
# @DESCRIPTION:
# exported function: loads a package dependency in a form
# cabal_package version
ghc-pkgdeps() {
echo $($(ghc-getghcpkg) describe "${1}") \
| sed \
-e '/depends/,/^.*:/ !d' \
-e 's/\(.*\)-\(.*\)-\(.*\)/\1 \2/' \
-e 's/^.*://g'
}
# @FUNCTION: ghc-package_pkg_postinst
# @DESCRIPTION:
# updates package.cache after package install
ghc-package_pkg_postinst() {
ghc-recache-db
}
# @FUNCTION: ghc-package_pkg_prerm
# @DESCRIPTION:
# updates package.cache after package deinstall
ghc-package_pkg_prerm() {
ewarn "ghc-package.eclass: 'ghc-package_pkg_prerm()' is a noop"
ewarn "ghc-package.eclass: consider 'haskell-cabal_pkg_postrm()' instead"
}
# @FUNCTION: ghc-package_pkg_postrm
# @DESCRIPTION:
# updates package.cache after package deinstall
ghc-package_pkg_postrm() {
ghc-recache-db
}

1129
eclass/git-r3.eclass Normal file

File diff suppressed because it is too large

View File

@ -0,0 +1,100 @@
# Copyright 1999-2018 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gkrellm-plugin.eclass
# @MAINTAINER:
# maintainer-needed@gentoo.org
# @AUTHOR:
# Original author: Jim Ramsay
# EAPI 6 author: David Seifert
# @SUPPORTED_EAPIS: 6
# @BLURB: Provides src_install used by (almost) all gkrellm plugins
# @DESCRIPTION:
# - Sets up default dependencies
# - Provides a common src_install method to avoid code duplication
#
# Changelog:
# 03 January 2018: David Seifert <soap@gentoo.org>
# - Port to EAPI 6, remove built_with_use, simplify a lot
# 12 March 2007: Jim Ramsay <lack@gentoo.org>
# - Added server plugin support
# 09 March 2007: Jim Ramsay <lack@gentoo.org>
# - Initial commit
#
# @ECLASS-VARIABLE: PLUGIN_SO
# @DESCRIPTION:
# The name of the plugin's .so file which will be installed in
# the plugin dir. Defaults to "${PN}$(get_modname)". Has to be a bash array.
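# Example (the path is hypothetical):
# @CODE
# PLUGIN_SO=( "src/${PN}$(get_modname)" )
# @CODE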
# @ECLASS-VARIABLE: PLUGIN_SERVER_SO
# @DESCRIPTION:
# The name of the plugin's gkrellmd (server) plugin $(get_modname) file.
# Unset by default. Has to be a bash array.
# @ECLASS-VARIABLE: PLUGIN_DOCS
# @DESCRIPTION:
# An optional list of docs to be installed, in addition to the default
# DOCS variable which is respected too. Has to be a bash array.
case ${EAPI:-0} in
[0-5])
die "${ECLASS} is banned in EAPI ${EAPI:-0}"
;;
6)
;;
*)
die "Unknown EAPI ${EAPI:-0}"
;;
esac
inherit multilib
EXPORT_FUNCTIONS src_install
if [[ ! ${_GKRELLM_PLUGIN_R1} ]]; then
DEPEND="virtual/pkgconfig"
# @FUNCTION: gkrellm-plugin_src_install
# @DESCRIPTION:
# Install the plugins and call einstalldocs
gkrellm-plugin_src_install() {
exeinto /usr/$(get_libdir)/gkrellm2/plugins
if ! declare -p PLUGIN_SO >/dev/null 2>&1 ; then
doexe ${PN}$(get_modname)
elif declare -p PLUGIN_SO | grep -q "^declare -a " ; then
doexe "${PLUGIN_SO[@]}"
else
die "PLUGIN_SO has to be a bash array!"
fi
if [[ -n ${PLUGIN_SERVER_SO} ]]; then
exeinto /usr/$(get_libdir)/gkrellm2/plugins-gkrellmd
if declare -p PLUGIN_SERVER_SO | grep -q "^declare -a " ; then
doexe "${PLUGIN_SERVER_SO[@]}"
else
die "PLUGIN_SERVER_SO has to be a bash array!"
fi
fi
einstalldocs
local d
for d in Changelog* ChangeLog*; do
[[ -s "${d}" ]] && dodoc "${d}"
done
if [[ -n ${PLUGIN_DOCS} ]]; then
if declare -p PLUGIN_DOCS | grep -q "^declare -a " ; then
dodoc "${PLUGIN_DOCS[@]}"
else
die "PLUGIN_DOCS has to be a bash array!"
fi
fi
}
_GKRELLM_PLUGIN_R1=1
fi

53
eclass/gnome.org.eclass Normal file
View File

@ -0,0 +1,53 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnome.org.eclass
# @MAINTAINER:
# gnome@gentoo.org
# @AUTHOR:
# Authors: Spidler <spidler@gentoo.org> with help of carparski.
# eclass variable additions and documentation: Gilles Dartiguelongue <eva@gentoo.org>
# @BLURB: Helper eclass for gnome.org hosted archives
# @DESCRIPTION:
# Provide a default SRC_URI for tarball hosted on gnome.org mirrors.
# versionator inherit kept for older EAPIs due to ebuilds (potentially) relying on it
[[ ${EAPI} == [0123456] ]] && inherit eapi7-ver versionator
# @ECLASS-VARIABLE: GNOME_TARBALL_SUFFIX
# @DESCRIPTION:
# Most projects hosted on gnome.org mirrors provide tarballs as tar.bz2 or
# tar.xz. This eclass defaults to bz2 for EAPI 0, 1, 2, 3 and defaults to xz for
# everything else. This is because the gnome mirrors are moving to only have xz
# tarballs for new releases.
if has "${EAPI:-0}" 0 1 2 3; then
: ${GNOME_TARBALL_SUFFIX:="bz2"}
else
: ${GNOME_TARBALL_SUFFIX:="xz"}
fi
# Even though xz-utils are in @system, they must still be added to DEPEND; see
# https://archives.gentoo.org/gentoo-dev/msg_a0d4833eb314d1be5d5802a3b710e0a4.xml
if [[ ${GNOME_TARBALL_SUFFIX} == "xz" ]]; then
if [[ ${EAPI:-0} != [0123456] ]]; then
BDEPEND="app-arch/xz-utils"
else
DEPEND="app-arch/xz-utils"
fi
fi
# @ECLASS-VARIABLE: GNOME_ORG_MODULE
# @DESCRIPTION:
# Name of the module as hosted on gnome.org mirrors.
# Leave unset if package name matches module name.
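# Example (the module name is hypothetical); set it before the inherit, as it
# is used to build SRC_URI:
# @CODE
# GNOME_ORG_MODULE="libfoo"
# inherit gnome.org
# @CODE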
: ${GNOME_ORG_MODULE:=$PN}
# @ECLASS-VARIABLE: GNOME_ORG_PVP
# @INTERNAL
# @DESCRIPTION:
# Major and minor numbers of the version number.
: ${GNOME_ORG_PVP:=$(ver_cut 1-2)}
SRC_URI="mirror://gnome/sources/${GNOME_ORG_MODULE}/${GNOME_ORG_PVP}/${GNOME_ORG_MODULE}-${PV}.tar.${GNOME_TARBALL_SUFFIX}"
S="${WORKDIR}/${GNOME_ORG_MODULE}-${PV}"

516
eclass/gnome2-utils.eclass Normal file
View File

@ -0,0 +1,516 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnome2-utils.eclass
# @MAINTAINER:
# gnome@gentoo.org
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Auxiliary functions commonly used by Gnome packages.
# @DESCRIPTION:
# This eclass provides a set of auxiliary functions needed by most Gnome
# packages. It may be used by non-Gnome packages as needed for handling various
# Gnome stack related functions such as:
# * GSettings schemas management
# * GConf schemas management
# * scrollkeeper (old Gnome help system) management
[[ ${EAPI} == 5 ]] && inherit multilib
# eutils.eclass: emktemp
# toolchain-funcs.eclass: tc-is-cross-compiler
# xdg-utils.eclass: xdg_environment_reset, xdg_icon_cache_update
inherit eutils toolchain-funcs xdg-utils
case ${EAPI} in
5|6|7) ;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
# @ECLASS-VARIABLE: GCONFTOOL_BIN
# @INTERNAL
# @DESCRIPTION:
# Path to gconftool-2
: ${GCONFTOOL_BIN:="/usr/bin/gconftool-2"}
# @ECLASS-VARIABLE: SCROLLKEEPER_DIR
# @INTERNAL
# @DESCRIPTION:
# Directory where scrollkeeper-update should do its work
: ${SCROLLKEEPER_DIR:="/var/lib/scrollkeeper"}
# @ECLASS-VARIABLE: SCROLLKEEPER_UPDATE_BIN
# @INTERNAL
# @DESCRIPTION:
# Path to scrollkeeper-update
: ${SCROLLKEEPER_UPDATE_BIN:="/usr/bin/scrollkeeper-update"}
# @ECLASS-VARIABLE: GLIB_COMPILE_SCHEMAS
# @INTERNAL
# @DESCRIPTION:
# Path to glib-compile-schemas
: ${GLIB_COMPILE_SCHEMAS:="/usr/bin/glib-compile-schemas"}
# @ECLASS-VARIABLE: GNOME2_ECLASS_SCHEMAS
# @INTERNAL
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of GConf schemas provided by the package
# @ECLASS-VARIABLE: GNOME2_ECLASS_ICONS
# @INTERNAL
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of icons provided by the package
# @ECLASS-VARIABLE: GNOME2_ECLASS_SCROLLS
# @INTERNAL
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of scrolls (documentation files) provided by the package
# @ECLASS-VARIABLE: GNOME2_ECLASS_GLIB_SCHEMAS
# @INTERNAL
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of GSettings schemas provided by the package
# @ECLASS-VARIABLE: GNOME2_ECLASS_GDK_PIXBUF_LOADERS
# @INTERNAL
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of gdk-pixbuf loaders provided by the package
# @FUNCTION: gnome2_environment_reset
# @DESCRIPTION:
# Reset various variables inherited from root's environment to a reasonable
# default for ebuilds to help avoid access violations and test failures.
gnome2_environment_reset() {
xdg_environment_reset
# Respected by >=glib-2.30.1-r1
export G_HOME="${T}"
# GST_REGISTRY is to work around gst utilities trying to read/write /root
export GST_REGISTRY="${T}/registry.xml"
# Ensure we don't rely on dconf/gconf while building, bug #511946
export GSETTINGS_BACKEND="memory"
if has ${EAPI} 6 7; then
# Try to cover the packages honoring this variable, bug #508124
export GST_INSPECT="$(type -P true)"
# Stop relying on random DISPLAY variable values, bug #534312
unset DISPLAY
fi
}
# @FUNCTION: gnome2_gconf_savelist
# @DESCRIPTION:
# Find the GConf schemas that are about to be installed and save their location
# in the GNOME2_ECLASS_SCHEMAS environment variable.
# This function should be called from pkg_preinst.
gnome2_gconf_savelist() {
pushd "${ED}" > /dev/null || die
export GNOME2_ECLASS_SCHEMAS=$(find 'etc/gconf/schemas/' -name '*.schemas' 2> /dev/null)
popd > /dev/null || die
}
# @FUNCTION: gnome2_gconf_install
# @DESCRIPTION:
# Applies any schema files installed by the current ebuild to Gconf's database
# using gconftool-2.
# This function should be called from pkg_postinst.
gnome2_gconf_install() {
local updater="${EROOT%/}${GCONFTOOL_BIN}"
if [[ -z "${GNOME2_ECLASS_SCHEMAS}" ]]; then
debug-print "No GNOME 2 GConf schemas found"
return
fi
if tc-is-cross-compiler ; then
ewarn "Updating of GNOME 2 GConf schemas skipped due to cross-compilation."
ewarn "You might want to run gconftool-2 manually on the target for"
ewarn "your final image and re-run it when packages installing"
ewarn "GNOME 2 GConf schemas get upgraded or added to the image."
return
fi
if [[ ! -x "${updater}" ]]; then
debug-print "${updater} is not executable"
return
fi
# We are ready to install the GConf schemas now
unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL
export GCONF_CONFIG_SOURCE="$("${updater}" --get-default-source | sed "s;:/;:${ROOT%/}/;")"
einfo "Installing GNOME 2 GConf schemas"
local F
for F in ${GNOME2_ECLASS_SCHEMAS}; do
if [[ -e "${EROOT%/}/${F}" ]]; then
debug-print "Installing schema: ${F}"
"${updater}" --makefile-install-rule "${EROOT%/}/${F}" 1>/dev/null
fi
done
# have gconf reload the new schemas
pids=$(pgrep -x gconfd-2)
if [[ $? == 0 ]] ; then
ebegin "Reloading GConf schemas"
kill -HUP ${pids}
eend $?
fi
}
# @FUNCTION: gnome2_gconf_uninstall
# @DESCRIPTION:
# Removes schema files previously installed by the current ebuild from Gconf's
# database.
gnome2_gconf_uninstall() {
local updater="${EROOT%/}${GCONFTOOL_BIN}"
if [[ -z "${GNOME2_ECLASS_SCHEMAS}" ]]; then
debug-print "No GNOME 2 GConf schemas found"
return
fi
if tc-is-cross-compiler ; then
ewarn "Removal of GNOME 2 GConf schemas skipped due to cross-compilation."
ewarn "You might want to run gconftool-2 manually on the target for"
ewarn "your final image to uninstall this package's schemas."
return
fi
if [[ ! -x "${updater}" ]]; then
debug-print "${updater} is not executable"
return
fi
unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL
export GCONF_CONFIG_SOURCE="$("${updater}" --get-default-source | sed "s;:/;:${ROOT%/}/;")"
einfo "Uninstalling GNOME 2 GConf schemas"
local F
for F in ${GNOME2_ECLASS_SCHEMAS}; do
if [[ -e "${EROOT%/}/${F}" ]]; then
debug-print "Uninstalling gconf schema: ${F}"
"${updater}" --makefile-uninstall-rule "${EROOT%/}/${F}" 1>/dev/null
fi
done
# have gconf reload the new schemas
pids=$(pgrep -x gconfd-2)
if [[ $? == 0 ]] ; then
ebegin "Reloading GConf schemas"
kill -HUP ${pids}
eend $?
fi
}
# @FUNCTION: gnome2_omf_fix
# @DESCRIPTION:
# Workaround applied to Makefile rules in order to remove redundant
# calls to scrollkeeper-update and sandbox violations.
# This function should be called from src_prepare.
gnome2_omf_fix() {
local omf_makefiles filename
omf_makefiles="$@"
if [[ -f ${S}/omf.make ]] ; then
omf_makefiles="${omf_makefiles} ${S}/omf.make"
fi
if [[ -f ${S}/gnome-doc-utils.make ]] ; then
omf_makefiles="${omf_makefiles} ${S}/gnome-doc-utils.make"
fi
# testing fixing of all makefiles found
# The sort is important to ensure .am is listed before the respective .in for
# maintainer mode regeneration not kicking in due to .am being newer than .in
for filename in $(find "${S}" -name "Makefile.in" -o -name "Makefile.am" |sort) ; do
omf_makefiles="${omf_makefiles} ${filename}"
done
ebegin "Fixing OMF Makefiles"
local retval=0
local fails=( )
for omf in ${omf_makefiles} ; do
sed -i -e 's:scrollkeeper-update:true:' "${omf}"
retval=$?
if [[ $retval -ne 0 ]] ; then
debug-print "updating of ${omf} failed"
# Add to the list of failures
fails[$(( ${#fails[@]} + 1 ))]=$omf
retval=2
fi
done
eend $retval
for f in "${fails[@]}" ; do
eerror "Failed to update OMF Makefile $f"
done
}
# @FUNCTION: gnome2_scrollkeeper_savelist
# @DESCRIPTION:
# Find the scrolls that are about to be installed and save their location
# in the GNOME2_ECLASS_SCROLLS environment variable.
# This function should be called from pkg_preinst.
gnome2_scrollkeeper_savelist() {
pushd "${ED}" > /dev/null || die
export GNOME2_ECLASS_SCROLLS=$(find 'usr/share/omf' -type f -name "*.omf" 2> /dev/null)
popd > /dev/null || die
}
# @FUNCTION: gnome2_scrollkeeper_update
# @DESCRIPTION:
# Updates the global scrollkeeper database.
# This function should be called from pkg_postinst and pkg_postrm.
gnome2_scrollkeeper_update() {
local updater="${EROOT%/}${SCROLLKEEPER_UPDATE_BIN}"
if [[ -z "${GNOME2_ECLASS_SCROLLS}" ]]; then
debug-print "No scroll cache to update"
return
fi
if tc-is-cross-compiler ; then
ewarn "Updating of scrollkeeper database skipped due to cross-compilation."
ewarn "You might want to run scrollkeeper-update manually on the target"
ewarn "for your final image and re-run it when packages installing"
ewarn "scrollkeeper OMF files get upgraded or added to the image."
return
fi
if [[ ! -x "${updater}" ]] ; then
debug-print "${updater} is not executable"
return
fi
ebegin "Updating scrollkeeper database ..."
"${updater}" -q -p "${EROOT%/}${SCROLLKEEPER_DIR}"
eend $?
}
# @FUNCTION: gnome2_schemas_savelist
# @DESCRIPTION:
# Find if there is any GSettings schema to install and save the list in
# GNOME2_ECLASS_GLIB_SCHEMAS variable. This is only necessary for eclass
# implementations that call gnome2_schemas_update conditionally.
# This function should be called from pkg_preinst.
gnome2_schemas_savelist() {
pushd "${ED}" > /dev/null || die
export GNOME2_ECLASS_GLIB_SCHEMAS=$(find 'usr/share/glib-2.0/schemas' -name '*.gschema.xml' 2>/dev/null)
popd > /dev/null || die
}
# @FUNCTION: gnome2_schemas_update
# @DESCRIPTION:
# Updates GSettings schemas.
# This function should be called from pkg_postinst and pkg_postrm.
gnome2_schemas_update() {
local updater="${EROOT%/}${GLIB_COMPILE_SCHEMAS}"
if tc-is-cross-compiler ; then
ewarn "Updating of GSettings schemas skipped due to cross-compilation."
ewarn "You might want to run glib-compile-schemas manually on the target"
ewarn "for your final image and re-run it when packages installing"
ewarn "GSettings schemas get upgraded or added to the image."
return
fi
if [[ ! -x ${updater} ]]; then
debug-print "${updater} is not executable"
return
fi
ebegin "Updating GSettings schemas"
${updater} --allow-any-name "$@" "${EROOT%/}/usr/share/glib-2.0/schemas" &>/dev/null
eend $?
}
# @FUNCTION: gnome2_gdk_pixbuf_savelist
# @DESCRIPTION:
# Find if there is any gdk-pixbuf loader to install and save the list in
# GNOME2_ECLASS_GDK_PIXBUF_LOADERS variable.
# This function should be called from pkg_preinst.
gnome2_gdk_pixbuf_savelist() {
pushd "${ED}" > /dev/null || die
export GNOME2_ECLASS_GDK_PIXBUF_LOADERS=$(find usr/lib*/gdk-pixbuf-2.0 -type f 2>/dev/null)
popd > /dev/null || die
}
# @FUNCTION: gnome2_gdk_pixbuf_update
# @DESCRIPTION:
# Updates gdk-pixbuf loader cache if GNOME2_ECLASS_GDK_PIXBUF_LOADERS has some.
# This function should be called from pkg_postinst and pkg_postrm.
gnome2_gdk_pixbuf_update() {
local updater="${EROOT%/}/usr/bin/${CHOST}-gdk-pixbuf-query-loaders"
[[ -x ${updater} ]] || updater="${EROOT%/}/usr/bin/gdk-pixbuf-query-loaders"
if [[ -z ${GNOME2_ECLASS_GDK_PIXBUF_LOADERS} ]]; then
debug-print "gdk-pixbuf loader cache does not need an update"
return
fi
if tc-is-cross-compiler ; then
ewarn "Updating of gdk-pixbuf loader cache skipped due to cross-compilation."
ewarn "You might want to run gdk-pixbuf-query-loaders manually on the target"
ewarn "for your final image and re-run it when packages installing"
ewarn "gdk-pixbuf loaders get upgraded or added to the image."
return
fi
if [[ ! -x ${updater} ]]; then
debug-print "${updater} is not executable"
return
fi
ebegin "Updating gdk-pixbuf loader cache"
local tmp_file=$(emktemp)
${updater} 1> "${tmp_file}" &&
chmod 0644 "${tmp_file}" &&
cp -f "${tmp_file}" "${EROOT%/}/usr/$(get_libdir)/gdk-pixbuf-2.0/2.10.0/loaders.cache" &&
rm "${tmp_file}" # don't replace this with mv, required for SELinux support
eend $?
}
# @FUNCTION: gnome2_query_immodules_gtk2
# @DESCRIPTION:
# Updates gtk2 immodules/gdk-pixbuf loaders listing.
gnome2_query_immodules_gtk2() {
local updater=${EPREFIX}/usr/bin/${CHOST}-gtk-query-immodules-2.0
[[ -x ${updater} ]] || updater=${EPREFIX}/usr/bin/gtk-query-immodules-2.0
if [[ ! -x ${updater} ]]; then
debug-print "${updater} is not executable"
return
fi
ebegin "Updating gtk2 input method module cache"
GTK_IM_MODULE_FILE="${EROOT%/}/usr/$(get_libdir)/gtk-2.0/2.10.0/immodules.cache" \
"${updater}" --update-cache
eend $?
}
# @FUNCTION: gnome2_query_immodules_gtk3
# @DESCRIPTION:
# Updates gtk3 immodules/gdk-pixbuf loaders listing.
gnome2_query_immodules_gtk3() {
local updater=${EPREFIX}/usr/bin/${CHOST}-gtk-query-immodules-3.0
[[ -x ${updater} ]] || updater=${EPREFIX}/usr/bin/gtk-query-immodules-3.0
if [[ ! -x ${updater} ]]; then
debug-print "${updater} is not executable"
return
fi
ebegin "Updating gtk3 input method module cache"
GTK_IM_MODULE_FILE="${EROOT%/}/usr/$(get_libdir)/gtk-3.0/3.0.0/immodules.cache" \
"${updater}" --update-cache
eend $?
}
# @FUNCTION: gnome2_giomodule_cache_update
# @DESCRIPTION:
# Updates glib's gio modules cache.
# This function should be called from pkg_postinst and pkg_postrm.
gnome2_giomodule_cache_update() {
local updater="${EROOT%/}/usr/bin/${CHOST}-gio-querymodules"
[[ -x ${updater} ]] || updater="${EROOT%/}/usr/bin/gio-querymodules"
if tc-is-cross-compiler ; then
ewarn "Updating of GIO modules cache skipped due to cross-compilation."
ewarn "You might want to run gio-querymodules manually on the target for"
ewarn "your final image for performance reasons and re-run it when packages"
ewarn "installing GIO modules get upgraded or added to the image."
return
fi
if [[ ! -x ${updater} ]]; then
debug-print "${updater} is not executable"
return
fi
ebegin "Updating GIO modules cache"
${updater} "${EROOT%/}"/usr/$(get_libdir)/gio/modules
eend $?
}
# @FUNCTION: gnome2_disable_deprecation_warning
# @DESCRIPTION:
# Disable deprecation warnings commonly found in glib based packages.
# Should be called from src_prepare.
gnome2_disable_deprecation_warning() {
local retval=0
local fails=( )
local makefile
ebegin "Disabling deprecation warnings"
# The sort is important to ensure .am is listed before the respective .in for
# maintainer mode regeneration not kicking in due to .am being newer than .in
while read makefile ; do
if ! grep -qE "(DISABLE_DEPRECATED|GSEAL_ENABLE)" "${makefile}"; then
continue
fi
LC_ALL=C sed -r -i \
-e 's:-D[A-Z_]+_DISABLE_DEPRECATED:$(/bin/true):g' \
-e 's:-DGSEAL_ENABLE(=[A-Za-z0-9_]*)?:$(/bin/true):g' \
-i "${makefile}"
if [[ $? -ne 0 ]]; then
# Add to the list of failures
fails+=( "${makefile}" )
retval=2
fi
done < <(find "${S}" -name "Makefile.in" \
-o -name "Makefile.am" -o -name "Makefile.decl" \
| sort; [[ -f "${S}"/configure ]] && echo configure)
# TODO: sedding configure.ac can trigger maintainer mode; bug #439602
# -o -name "configure.ac" -o -name "configure.in" \
# | sort; echo configure)
eend ${retval}
for makefile in "${fails[@]}" ; do
ewarn "Failed to disable deprecation warnings in ${makefile}"
done
}
case ${EAPI} in
5|6)
# @FUNCTION: gnome2_icon_savelist
# @DESCRIPTION:
# Find the icons that are about to be installed and save their location
# in the GNOME2_ECLASS_ICONS environment variable. This is only
# necessary for eclass implementations that call
# gnome2_icon_cache_update conditionally.
# This function should be called from pkg_preinst.
gnome2_icon_savelist() {
pushd "${ED}" > /dev/null || die
export GNOME2_ECLASS_ICONS=$(find 'usr/share/icons' -maxdepth 1 -mindepth 1 -type d 2> /dev/null)
popd > /dev/null || die
}
# @FUNCTION: gnome2_icon_cache_update
# @DESCRIPTION:
# Updates Gtk+ icon cache files under /usr/share/icons.
# Deprecated. Please use xdg_icon_cache_update from xdg-utils.eclass
gnome2_icon_cache_update() {
xdg_icon_cache_update
}
;;
esac

337
eclass/gnome2.eclass Normal file
View File

@ -0,0 +1,337 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnome2.eclass
# @MAINTAINER:
# gnome@gentoo.org
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Provides phases for Gnome/Gtk+ based packages.
# @DESCRIPTION:
# Exports portage base functions used by ebuilds written for packages using the
# GNOME framework. For additional functions, see gnome2-utils.eclass.
# @ECLASS-VARIABLE: GNOME2_EAUTORECONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# Run eautoreconf instead of only elibtoolize
GNOME2_EAUTORECONF=${GNOME2_EAUTORECONF:-""}
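# Example (illustrative): since the variable is evaluated at inherit time,
# it has to be set before inheriting the eclass:
#
# @CODE
# GNOME2_EAUTORECONF=yes
# inherit gnome2
# @CODE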
[[ ${GNOME2_EAUTORECONF} == 'yes' ]] && inherit autotools
[[ ${EAPI} == [56] ]] && inherit eutils ltprune
inherit libtool gnome.org gnome2-utils xdg
case ${EAPI:-0} in
5)
EXPORT_FUNCTIONS src_unpack src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_postrm
;;
6|7)
EXPORT_FUNCTIONS src_prepare src_configure src_compile src_install pkg_preinst pkg_postinst pkg_postrm
;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
# @ECLASS-VARIABLE: ELTCONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# Extra options passed to elibtoolize
ELTCONF=${ELTCONF:-""}
# @ECLASS-VARIABLE: G2CONF
# @DEFAULT_UNSET
# @DESCRIPTION:
# Extra configure opts passed to econf.
# Deprecated, pass extra arguments to gnome2_src_configure.
# Banned in eapi6 and newer.
if has ${EAPI} 5; then
G2CONF=${G2CONF:-""}
fi
# @ECLASS-VARIABLE: GCONF_DEBUG
# @DEFAULT_UNSET
# @DESCRIPTION:
# Whether to handle debug or not.
# Some gnome applications support various levels of debugging (yes, no, minimum,
# etc), but using --disable-debug also removes g_assert which makes debugging
# harder. This variable should be set to yes for such packages for the eclass
# to handle it properly. It will enable minimal debug with USE=-debug.
# Note that this is most commonly found in configure.ac as GNOME_DEBUG_CHECK.
#
# Banned since eapi6 as upstream is moving away from this obsolete macro in favor
# of autoconf-archive macros, that do not expose this issue (bug #270919)
if has ${EAPI} 5; then
if [[ ${GCONF_DEBUG} != "no" ]]; then
IUSE="debug"
fi
fi
# @ECLASS-VARIABLE: GNOME2_ECLASS_GIO_MODULES
# @INTERNAL
# @DESCRIPTION:
# Array containing glib GIO modules
# @ECLASS-VARIABLE: GNOME2_LA_PUNT
# @DESCRIPTION:
# In EAPIs 5 and 6, it relies on prune_libtool_files (from ltprune.eclass) for
# this. Later EAPIs use find ... -delete. Available values for GNOME2_LA_PUNT:
# - "no": will not clean any .la files
# - "yes": will run prune_libtool_files --modules
# - If it is not set, it will run prune_libtool_files
GNOME2_LA_PUNT=${GNOME2_LA_PUNT:-""}
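# Example (illustrative): an ebuild that wants the .la files of its loadable
# modules removed could set:
#
# @CODE
# GNOME2_LA_PUNT="yes"
# @CODE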
# @FUNCTION: gnome2_src_unpack
# @DESCRIPTION:
# Stub function for old EAPI.
gnome2_src_unpack() {
if has ${EAPI} 5; then
unpack ${A}
cd "${S}"
else
die "gnome2_src_unpack is banned since eapi6"
fi
}
# @FUNCTION: gnome2_src_prepare
# @DESCRIPTION:
# Prepare environment for build, fix build of scrollkeeper documentation,
# run elibtoolize.
gnome2_src_prepare() {
xdg_src_prepare
# Prevent assorted access violations and test failures
gnome2_environment_reset
# Prevent scrollkeeper access violations
# Since EAPI 6 we no longer run it, as the scrollkeeper helpers from
# rarian do not actually do anything, so access violations
# shouldn't occur.
has ${EAPI} 5 && gnome2_omf_fix
# Disable all deprecation warnings
gnome2_disable_deprecation_warning
# Run libtoolize or eautoreconf, bug #591584
# https://bugzilla.gnome.org/show_bug.cgi?id=655517
if [[ ${GNOME2_EAUTORECONF} == 'yes' ]]; then
eautoreconf
else
elibtoolize ${ELTCONF}
fi
}
# @FUNCTION: gnome2_src_configure
# @DESCRIPTION:
# Gnome specific configure handling
gnome2_src_configure() {
# Deprecated for a long time now and banned since eapi6, see Gnome team policies
if [[ -n ${G2CONF} ]] ; then
if has ${EAPI} 5; then
eqawarn "G2CONF set, please review documentation at https://wiki.gentoo.org/wiki/Project:GNOME/Gnome_Team_Ebuild_Policies#G2CONF_and_src_configure"
else
die "G2CONF set, please review documentation at https://wiki.gentoo.org/wiki/Project:GNOME/Gnome_Team_Ebuild_Policies#G2CONF_and_src_configure"
fi
fi
local g2conf=()
if has ${EAPI} 5; then
if [[ ${GCONF_DEBUG} != 'no' ]] ; then
if use debug ; then
g2conf+=( --enable-debug=yes )
fi
fi
else
if [[ -n ${GCONF_DEBUG} ]] ; then
die "GCONF_DEBUG is banned since eapi6 in favor of each ebuild taking care of the proper handling of debug configure option"
fi
fi
# We consider packages installing gtk-doc to be handled by adding
# DEPEND="dev-util/gtk-doc-am" which provides tools to relink URLs in
# documentation to already installed documentation. This decision also
# greatly helps with constantly broken doc generation.
# Remember to drop 'doc' USE flag from your package if it was only used to
# rebuild docs.
if grep -q "enable-gtk-doc" "${ECONF_SOURCE:-.}"/configure ; then
g2conf+=( --disable-gtk-doc )
fi
# Pass --disable-maintainer-mode when needed
if grep -q "^[[:space:]]*AM_MAINTAINER_MODE(\[enable\])" \
"${ECONF_SOURCE:-.}"/configure.*; then
g2conf+=( --disable-maintainer-mode )
fi
# Pass --disable-scrollkeeper when possible
if grep -q "disable-scrollkeeper" "${ECONF_SOURCE:-.}"/configure; then
g2conf+=( --disable-scrollkeeper )
fi
# Pass --disable-schemas-install when possible
if grep -q "disable-schemas-install" "${ECONF_SOURCE:-.}"/configure; then
g2conf+=( --disable-schemas-install )
fi
# Pass --disable-schemas-compile when possible
if grep -q "disable-schemas-compile" "${ECONF_SOURCE:-.}"/configure; then
g2conf+=( --disable-schemas-compile )
fi
# Pass --disable-update-mimedb when possible
if grep -q "disable-update-mimedb" "${ECONF_SOURCE:-.}"/configure; then
g2conf+=( --disable-update-mimedb )
fi
# Pass --enable-compile-warnings=minimum as we don't want -Werror* flags, bug #471336
if grep -q "enable-compile-warnings" "${ECONF_SOURCE:-.}"/configure; then
g2conf+=( --enable-compile-warnings=minimum )
fi
# Pass --docdir with proper directory, bug #482646 (not needed since eapi6)
if has ${EAPI} 5; then
if grep -q "^ *--docdir=" "${ECONF_SOURCE:-.}"/configure; then
g2conf+=( --docdir="${EPREFIX}"/usr/share/doc/${PF} )
fi
fi
# Avoid sandbox violations caused by gnome-vfs (bug #128289 and #345659)
if has ${EAPI} 5; then
addwrite "$(unset HOME; echo ~)/.gnome2"
else
addpredict "$(unset HOME; echo ~)/.gnome2"
fi
if has ${EAPI} 5; then
econf ${g2conf[@]} ${G2CONF} "$@"
else
econf ${g2conf[@]} "$@"
fi
}
# @FUNCTION: gnome2_src_compile
# @DESCRIPTION:
# Only default src_compile for now
gnome2_src_compile() {
if has ${EAPI} 5; then
emake
else
default
fi
}
# @FUNCTION: gnome2_src_install
# @DESCRIPTION:
# Gnome specific install. Handles typical GConf and scrollkeeper setup
# in packages and removal of .la files if requested
gnome2_src_install() {
# we must delay gconf schema installation due to sandbox
export GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL="1"
local sk_tmp_dir="/var/lib/scrollkeeper"
# scrollkeeper-update from rarian doesn't do anything. Therefore, since
# EAPI 6 we no longer take care of it.
#
# if this is not present, scrollkeeper-update may segfault and
# create bogus directories in /var/lib/
if has ${EAPI} 5; then
dodir "${sk_tmp_dir}" || die "dodir failed"
emake DESTDIR="${D}" "scrollkeeper_localstate_dir=${ED}${sk_tmp_dir} " "$@" install || die "install failed"
else
default
fi
unset GCONF_DISABLE_MAKEFILE_SCHEMA_INSTALL
# Handle documentation as 'default' for eapi5, bug #373131
# Since eapi6 this is handled by default on its own plus MAINTAINERS and HACKING
# files that are really common in gnome packages (bug #573390)
if has ${EAPI} 5; then
einstalldocs
else
local d
for d in HACKING MAINTAINERS; do
[[ -s "${d}" ]] && dodoc "${d}"
done
fi
# Do not keep /var/lib/scrollkeeper because:
# 1. The scrollkeeper database is regenerated at pkg_postinst()
# 2. ${ED}/var/lib/scrollkeeper contains only indexes for the current pkg
# thus it makes no sense if pkg_postinst ISN'T run for some reason.
rm -rf "${ED}${sk_tmp_dir}"
rmdir "${ED}/var/lib" 2>/dev/null
rmdir "${ED}/var" 2>/dev/null
# Make sure this one doesn't get in the portage db
rm -fr "${ED}/usr/share/applications/mimeinfo.cache"
# Delete all .la files
if has ${EAPI} 5 6; then
case "${GNOME2_LA_PUNT}" in
yes) prune_libtool_files --modules;;
no) ;;
*) prune_libtool_files;;
esac
else
if [[ ${GNOME2_LA_PUNT} != 'no' ]]; then
find "${ED}" -name '*.la' -delete || die
fi
fi
}
# @FUNCTION: gnome2_pkg_preinst
# @DESCRIPTION:
# Finds Icons, GConf and GSettings schemas for later handling in pkg_postinst
gnome2_pkg_preinst() {
xdg_pkg_preinst
gnome2_gconf_savelist
gnome2_schemas_savelist
gnome2_scrollkeeper_savelist
gnome2_gdk_pixbuf_savelist
local f
GNOME2_ECLASS_GIO_MODULES=()
while IFS= read -r -d '' f; do
GNOME2_ECLASS_GIO_MODULES+=( ${f} )
done < <(cd "${D}" && find usr/$(get_libdir)/gio/modules -type f -print0 2>/dev/null)
export GNOME2_ECLASS_GIO_MODULES
}
# @FUNCTION: gnome2_pkg_postinst
# @DESCRIPTION:
# Handle scrollkeeper, GConf, GSettings, Icons, desktop and mime
# database updates.
gnome2_pkg_postinst() {
xdg_pkg_postinst
gnome2_gconf_install
if [[ -n ${GNOME2_ECLASS_GLIB_SCHEMAS} ]]; then
gnome2_schemas_update
fi
gnome2_scrollkeeper_update
gnome2_gdk_pixbuf_update
if [[ ${#GNOME2_ECLASS_GIO_MODULES[@]} -gt 0 ]]; then
gnome2_giomodule_cache_update
fi
}
# # FIXME Handle GConf schemas removal
#gnome2_pkg_prerm() {
# gnome2_gconf_uninstall
#}
# @FUNCTION: gnome2_pkg_postrm
# @DESCRIPTION:
# Handle scrollkeeper, GSettings, Icons, desktop and mime database updates.
gnome2_pkg_postrm() {
xdg_pkg_postrm
if [[ -n ${GNOME2_ECLASS_GLIB_SCHEMAS} ]]; then
gnome2_schemas_update
fi
gnome2_scrollkeeper_update
if [[ ${#GNOME2_ECLASS_GIO_MODULES[@]} -gt 0 ]]; then
gnome2_giomodule_cache_update
fi
}

90
eclass/gnuconfig.eclass Normal file
View File

@ -0,0 +1,90 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
#
# Author: Will Woods <wwoods@gentoo.org>
#
# This eclass is used to automatically update files that typically come with
# automake to the newest version available on the system. The most common use
# of this is to update config.guess and config.sub when configure dies from
# misguessing your canonical system name (CHOST). It can also be used to update
# other files that come with automake, e.g. depcomp, mkinstalldirs, etc.
#
# usage: gnuconfig_update [file1 file2 ...]
# if called without arguments, config.guess and config.sub will be updated.
# All files in the source tree ($S) with the given name(s) will be replaced
# with the newest available versions chosen from the list of locations in
# gnuconfig_findnewest(), below.
#
# gnuconfig_update should generally be called from src_unpack()
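#
# Example (illustrative sketch), following the note above about calling it
# from src_unpack():
#
#   src_unpack() {
#       unpack ${A}
#       cd "${S}"
#       gnuconfig_update
#   }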
DEPEND="sys-devel/gnuconfig"
# Wrapper function for gnuconfig_do_update. If no arguments are given, update
# config.sub and config.guess (old default behavior), otherwise update the
# named files.
gnuconfig_update() {
local startdir # declared here ... used in gnuconfig_do_update
if [[ $1 == /* ]] ; then
startdir=$1
shift
else
startdir=${S}
fi
if [[ $# -gt 0 ]] ; then
gnuconfig_do_update "$@"
else
gnuconfig_do_update config.sub config.guess
fi
return $?
}
# Copy the newest available version of specified files over any old ones in the
# source dir. This function shouldn't be called directly - use gnuconfig_update
#
# Note that since bash uses dynamic scoping, startdir is available here from
# the gnuconfig_update function
gnuconfig_do_update() {
local configsubs_dir target targetlist file
[[ $# -eq 0 ]] && die "do not call gnuconfig_do_update; use gnuconfig_update"
configsubs_dir=$(gnuconfig_findnewest)
einfo "Using GNU config files from ${configsubs_dir}"
for file in "$@" ; do
if [[ ! -r ${configsubs_dir}/${file} ]] ; then
eerror "Can't read ${configsubs_dir}/${file}, skipping.."
continue
fi
targetlist=$(find "${startdir}" -name "${file}")
if [[ -n ${targetlist} ]] ; then
for target in ${targetlist} ; do
[[ -L ${target} ]] && rm -f "${target}"
einfo " Updating ${target/$startdir\//}"
cp -f "${configsubs_dir}/${file}" "${target}"
eend $?
done
else
ewarn " No ${file} found in ${startdir}, skipping ..."
fi
done
return 0
}
# this searches the standard locations for the newest config.{sub|guess}, and
# returns the directory where they can be found.
gnuconfig_findnewest() {
local locations=(
"${EPREFIX}"/usr/share/misc/config.sub
"${EPREFIX}"/usr/share/gnuconfig/config.sub
"${EPREFIX}"/usr/share/automake*/config.sub
"${EPREFIX}"/usr/share/libtool/config.sub
)
grep -s '^timestamp' "${locations[@]}" | \
sort -r -n -t\' -k2 | \
sed -n '1{s,/config.sub:.*$,,;p;q}'
}

26
eclass/gnustep-2.eclass Normal file
View File

@ -0,0 +1,26 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnustep-2.eclass
# @MAINTAINER:
# GNUstep Herd <gnustep@gentoo.org>
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6 7
# @BLURB: eclass for GNUstep Apps, Frameworks, and Bundles build
# @DESCRIPTION:
# This eclass sets up GNUstep environment to properly install
# GNUstep packages
inherit gnustep-base
DEPEND=">=gnustep-base/gnustep-make-2.0
virtual/gnustep-back"
RDEPEND="${DEPEND}"
# The following gnustep-based EXPORT_FUNCTIONS are available:
# * gnustep-base_pkg_setup
# * gnustep-base_src_unpack (EAPI 0|1 only)
# * gnustep-base_src_prepare (EAPI>=2 only)
# * gnustep-base_src_configure (EAPI>=2 only)
# * gnustep-base_src_compile
# * gnustep-base_src_install
# * gnustep-base_pkg_postinst

269
eclass/gnustep-base.eclass Normal file
View File

@ -0,0 +1,269 @@
# Copyright 1999-2018 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gnustep-base.eclass
# @MAINTAINER:
# GNUstep Herd <gnustep@gentoo.org>
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6 7
# @BLURB: Internal handling of GNUstep packages
# @DESCRIPTION:
# Inner gnustep eclass, should only be inherited directly by gnustep-base
# packages
inherit eutils flag-o-matic
# IUSE variables across all GNUstep packages
# "debug": enable code for debugging
# "doc": build and install documentation, if available
IUSE="debug doc"
# packages needed to build any base gnustep package
GNUSTEP_CORE_DEPEND="doc? ( virtual/texi2dvi dev-tex/latex2html app-text/texi2html )"
# New layout is used when ${EPREFIX}/usr/share/GNUstep/Makefiles exists
# Where to install GNUstep (with old layout)
GNUSTEP_PREFIX="${EPREFIX}/usr/GNUstep"
# GNUstep environment array
typeset -a GS_ENV
# Ebuild function overrides
gnustep-base_pkg_setup() {
if test_version_info 3.3 ; then
strip-unsupported-flags
elif test_version_info 3.4 ; then
# strict-aliasing is known to break obj-c stuff in gcc-3.4*
filter-flags -fstrict-aliasing
fi
# known to break ObjC (bug 86089)
filter-flags -fomit-frame-pointer
}
gnustep-base_src_unpack() {
unpack ${A}
cd "${S}"
gnustep-base_src_prepare
}
gnustep-base_src_prepare() {
if [[ -f ./GNUmakefile ]] ; then
# Kill stupid includes that are simply overdone or useless on normal
# Gentoo, but (may) cause major headaches on Prefixed Gentoo. If this
# only removes a part of a path it's good that it bails out, as we want
# to know when they use some direct include.
ebegin "Cleaning paths from GNUmakefile"
sed -i \
-e 's|-I/usr/X11R6/include/\?||g' \
-e 's|-I/usr/include/\?||g' \
-e 's|-L/usr/X11R6/lib/\?||g' \
-e 's|-L/usr/lib/\?||g' \
GNUmakefile
eend $?
fi
! has ${EAPI:-0} 0 1 2 3 4 5 && default
}
gnustep-base_src_configure() {
egnustep_env
if [[ -x ./configure ]] ; then
econf || die "configure failed"
fi
}
gnustep-base_src_compile() {
egnustep_env
case ${EAPI:-0} in
0|1) gnustep-base_src_configure ;;
esac
egnustep_make
}
gnustep-base_src_install() {
egnustep_env
egnustep_install
if use doc ; then
egnustep_env
egnustep_doc
fi
egnustep_install_config
}
gnustep-base_pkg_postinst() {
[[ $(type -t gnustep_config_script) != "function" ]] && return 0
local SCRIPT_PATH
if [[ -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
SCRIPT_PATH="/usr/bin"
else
SCRIPT_PATH=${GNUSTEP_SYSTEM_TOOLS}/Gentoo
fi
elog "To use this package, as *user* you should run:"
elog " ${SCRIPT_PATH}/config-${PN}.sh"
}
# Clean/reset an ebuild to the installed GNUstep environment
egnustep_env() {
# Get additional variables
GNUSTEP_SH_EXPORT_ALL_VARIABLES="true"
# Makefiles path
local GS_MAKEFILES
if [[ -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
GS_MAKEFILES=${EPREFIX}/usr/share/GNUstep/Makefiles
else
GS_MAKEFILES=${GNUSTEP_PREFIX}/System/Library/Makefiles
fi
if [[ -f ${GS_MAKEFILES}/GNUstep.sh ]] ; then
# Reset GNUstep variables
source "${GS_MAKEFILES}"/GNUstep-reset.sh
source "${GS_MAKEFILES}"/GNUstep.sh
# Create compilation GNUstep.conf if it does not exist yet
if [[ ! -f ${WORKDIR}/GNUstep.conf ]]; then
cp "${EPREFIX}"/etc/GNUstep/GNUstep.conf "${WORKDIR}" \
|| die "GNUstep.conf copy failed"
sed -e "s#\(GNUSTEP_USER_.*DIR.*=\)#\1${WORKDIR}/#" \
-i "${WORKDIR}"/GNUstep.conf || die "GNUstep.conf sed failed"
fi
if [[ ! -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
# Set rpath in ldflags when available
case ${CHOST} in
*-linux-gnu|*-solaris*)
is-ldflagq -Wl,-rpath="${GNUSTEP_SYSTEM_LIBRARIES}" \
|| append-ldflags \
-Wl,-rpath="${GNUSTEP_SYSTEM_LIBRARIES}"
;;
esac
fi
# Set up env vars for make operations
GS_ENV=( AUXILIARY_LDFLAGS="${LDFLAGS}" \
ADDITIONAL_NATIVE_LIB_DIRS="${GNUSTEP_SYSTEM_LIBRARIES}" \
DESTDIR="${D}" \
HOME="${T}" \
GNUSTEP_CONFIG_FILE="${WORKDIR}"/GNUstep.conf \
GNUSTEP_INSTALLATION_DOMAIN=SYSTEM \
TAR_OPTIONS="${TAR_OPTIONS} --no-same-owner" \
messages=yes )
use doc \
&& GS_ENV=( "${GS_ENV[@]}" VARTEXFONTS="${T}"/fonts )
use debug \
&& GS_ENV=( "${GS_ENV[@]}" "debug=yes" ) \
|| GS_ENV=( "${GS_ENV[@]}" "debug=no" )
if has_version "gnustep-base/gnustep-make[libobjc2]";
then
# Set clang for packages that do not respect gnustep-make
# settings (gnustep-base's configure for example)
export CC=clang CXX=clang CPP="clang -E" LD="clang"
fi
return 0
fi
die "gnustep-make not installed!"
}
# Make utilizing GNUstep Makefiles
egnustep_make() {
if [[ -f ./Makefile || -f ./makefile || -f ./GNUmakefile ]] ; then
emake ${*} "${GS_ENV[@]}" all || die "package make failed"
return 0
fi
die "no Makefile found"
}
# Make-install utilizing GNUstep Makefiles
egnustep_install() {
if [[ ! -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
# avoid problems due to our "weird" prefix, make sure it exists
mkdir -p "${D}"${GNUSTEP_SYSTEM_TOOLS}
fi
if [[ -f ./[mM]akefile || -f ./GNUmakefile ]] ; then
emake ${*} "${GS_ENV[@]}" install || die "package install failed"
return 0
fi
die "no Makefile found"
}
# Make and install docs using GNUstep Makefiles
egnustep_doc() {
if [[ -d "${S}"/Documentation ]] ; then
# Check documentation presence
pushd "${S}"/Documentation || die
if [[ -f ./[mM]akefile || -f ./GNUmakefile ]] ; then
emake "${GS_ENV[@]}" all || die "doc make failed"
emake "${GS_ENV[@]}" install || die "doc install failed"
fi
popd || die
fi
}
egnustep_install_config() {
[[ $(type -t gnustep_config_script) != "function" ]] && return 0
local cfile=config-${PN}.sh
cat << 'EOF' > "${T}"/${cfile}
#!/usr/bin/env bash
gnustep_append_default() {
if [[ -z $1 || -z $2 || -z $3 ]]; then
echo "warning: invalid script invocation"
return
fi
dom=$1
key=$2
val=$3
cur=$(defaults read ${dom} ${key}) 2> /dev/null
if [[ -z $cur ]] ; then
echo " * setting ${dom} ${key}"
defaults write ${dom} ${key} "( ${val} )"
elif [[ ${cur} != *${val}* ]] ; then
echo " * adding ${val} to ${dom} ${key}"
echo "${cur%)\'}, \"${val}\" )'" | defaults write
else
echo " * ${val} already present in ${dom} ${key}"
fi
}
gnustep_set_default() {
if [[ -z $1 || -z $2 || -z $3 ]]; then
echo "warning: invalid script invocation"
return
fi
dom=$1
key=$2
val=$3
echo " * setting ${dom} ${key}"
defaults write ${dom} ${key} ${val}
}
EOF
echo "echo \"Applying ${P} default configuration ...\"" >> "${T}"/${cfile}
gnustep_config_script | \
while read line ; do
echo "${line}" >> "${T}"/${cfile}
done
echo 'echo "done"' >> "${T}"/${cfile}
if [[ -d ${EPREFIX}/usr/share/GNUstep/Makefiles ]]; then
exeinto /usr/bin
else
exeinto ${GNUSTEP_SYSTEM_TOOLS#${EPREFIX}}/Gentoo
fi
doexe "${T}"/${cfile}
}
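# Example (illustrative; the domain, key and value are made up): an ebuild can
# provide user defaults by defining a gnustep_config_script function whose
# output becomes part of the installed config-${PN}.sh script:
#
# gnustep_config_script() {
# 	echo "gnustep_set_default NSGlobalDomain ExampleKey ExampleValue"
# }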
case ${EAPI:-0} in
0|1) EXPORT_FUNCTIONS pkg_setup src_unpack src_compile src_install pkg_postinst ;;
*) EXPORT_FUNCTIONS pkg_setup src_prepare src_configure src_compile src_install pkg_postinst ;;
esac

418
eclass/go-module.eclass Normal file
View File

@ -0,0 +1,418 @@
# Copyright 2019-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: go-module.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# @AUTHOR:
# William Hubbs <williamh@gentoo.org>
# Robin H. Johnson <robbat2@gentoo.org>
# @SUPPORTED_EAPIS: 7
# @BLURB: basic eclass for building software written as go modules
# @DESCRIPTION:
# This eclass provides basic settings and functions needed by all software
# written in the go programming language that uses modules.
#
# If the software you are packaging has a file named go.mod in its top
# level directory, it uses modules and your ebuild should inherit this
# eclass. If it does not, your ebuild should use the golang-* eclasses.
#
# If, besides go.mod, your software has a directory named vendor in its
# top level directory, the only thing you need to do is inherit the
# eclass. If there is no vendor directory, you need to also populate
# EGO_SUM and call go-module_set_globals as discussed below.
#
# Since Go programs are statically linked, it is important that your ebuild's
# LICENSE= setting includes the licenses of all statically linked
# dependencies. So please make sure it is accurate.
# You can use a utility like dev-util/golicense (network connectivity is
# required) to extract this information from the compiled binary.
#
# @EXAMPLE:
#
# @CODE
#
# inherit go-module
#
# EGO_SUM=(
# "github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59/go.mod"
# "github.com/aybabtme/rgbterm v0.0.0-20170906152045-cc83f3b3ce59"
# )
#
# go-module_set_globals
#
# SRC_URI="https://github.com/example/${PN}/archive/v${PV}.tar.gz -> ${P}.tar.gz
# ${EGO_SUM_SRC_URI}"
#
# @CODE
case ${EAPI:-0} in
7) ;;
*) die "${ECLASS} EAPI ${EAPI} is not supported."
esac
if [[ -z ${_GO_MODULE} ]]; then
_GO_MODULE=1
BDEPEND=">=dev-lang/go-1.12"
# Workaround for pkgcheck false positive: https://github.com/pkgcore/pkgcheck/issues/214
# MissingUnpackerDep: version ...: missing BDEPEND="app-arch/unzip"
# Added here rather than to each affected package, so it can be cleaned up just
# once when pkgcheck is improved.
BDEPEND+=" app-arch/unzip"
# Force go to build in module mode.
# In this mode the GOPATH environment variable is ignored.
# This will become the default in the future.
export GO111MODULE=on
# Set the default for the go build cache
# See "go help environment" for information on this setting
export GOCACHE="${T}/go-build"
# The following go flags should be used for all builds.
# -v prints the names of packages as they are compiled
# -x prints commands as they are executed
# -mod=readonly do not update go.mod/go.sum but fail if updates are needed
# -mod=vendor use the vendor directory instead of downloading dependencies
export GOFLAGS="-v -x -mod=readonly"
# Do not complain about CFLAGS etc since go projects do not use them.
QA_FLAGS_IGNORED='.*'
# Go packages should not be stripped with strip(1).
RESTRICT+=" strip"
EXPORT_FUNCTIONS src_unpack pkg_postinst
# @ECLASS-VARIABLE: EGO_SUM
# @DESCRIPTION:
# This is an array based on the go.sum content from inside the target package.
# Each array entry must be quoted and contain information from a single
# line from go.sum.
#
# The format of go.sum is described upstream here:
# https://tip.golang.org/cmd/go/#hdr-Module_authentication_using_go_sum
#
# For inclusion in EGO_SUM, the h1: value and other future extensions SHOULD be
# omitted at this time. The EGO_SUM parser will accept them for ease of ebuild
# creation.
#
# h1:<hash> is the Hash1 structure used by upstream Go
# The Hash1 is MORE stable than Gentoo distfile hashing, and upstream warns
# that it's conceptually possible for the Hash1 value to remain stable while
# the upstream zipfiles change. Here are examples that do NOT change the h1:
# hash, but do change a regular checksum over all bytes of the file:
# - Differing mtimes within zipfile
# - Differing filename ordering with the zipfile
# - Differing zipfile compression parameters
# - Differing zipfile extra fields
#
# For Gentoo usage, the authors of this eclass feel that the h1: hash should
# NOT be included in the EGO_SUM at this time in order to reduce size of the
# ebuilds. This position will be reconsidered in future when a Go module
# distfile collision comes to light, where the Hash1 value of two distfiles is
# the same, but checksums over the file as a byte stream consider the files to
# be different.
#
# This decision does NOT weaken Go module security, as Go will verify the
# go.sum copy of the Hash1 values during building of the package.
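#
# A rough way to generate the array entries from an upstream go.sum
# (illustrative sketch only; it simply drops the hash column, as
# recommended above):
#
# @CODE
# while read -r module version hash; do
# 	echo "\"${module} ${version}\""
# done < go.sum
# @CODE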
# @ECLASS-VARIABLE: _GOMODULE_GOPROXY_BASEURI
# @DESCRIPTION:
# Golang module proxy service to fetch module files from. Note that the module
# proxy generally verifies modules via the Hash1 code.
#
# Users in China may find some mirrors in the default list blocked, and should
# explicitly set an entry in /etc/portage/mirrors for goproxy to
# https://goproxy.cn/ or another mirror that is not blocked in China.
# See https://arslan.io/2019/08/02/why-you-should-use-a-go-module-proxy/ for
# further details
#
# This variable is NOT intended for user-level configuration of mirrors, but
# rather to cover go modules that might exist only on specific Goproxy
# servers for non-technical reasons.
#
# This variable should NOT be present in user-level configuration e.g.
# /etc/portage/make.conf, as it will violate metadata immutability!
#
# I am considering removing this and just hard coding mirror://goproxy
# below, so please do not rely on it.
: "${_GOMODULE_GOPROXY_BASEURI:=mirror://goproxy/}"
# @ECLASS-VARIABLE: _GOMODULE_GOSUM_REVERSE_MAP
# @DESCRIPTION:
# Mapping back from Gentoo distfile name to upstream distfile path.
# Associative array to avoid O(N*M) performance when populating the GOPROXY
# directory structure.
declare -A -g _GOMODULE_GOSUM_REVERSE_MAP
# @FUNCTION: go-module_set_globals
# @DESCRIPTION:
# Convert the information in EGO_SUM for other usage in the ebuild.
# - Populates EGO_SUM_SRC_URI that can be added to SRC_URI
# - Exports _GOMODULE_GOSUM_REVERSE_MAP which provides reverse mapping from
# distfile back to the relative part of SRC_URI, as needed for
# GOPROXY=file:///...
go-module_set_globals() {
local line exts
# for tracking go.sum errors
local error_in_gosum=0
local -a gosum_errorlines
# used to make SRC_URI easier to read
local newline=$'\n'
# Now parse EGO_SUM
for line in "${EGO_SUM[@]}"; do
local module version modfile version_modfile kvs x
read -r module version_modfile kvs <<< "${line}"
# kvs contains the hash and may contain other data from
# upstream in the future. We do not currently use any of this data.
# Split 'v0.3.0/go.mod' into 'v0.3.0' and '/go.mod'
# It might NOT have the trailing /go.mod
IFS=/ read -r version modfile x <<<"${version_modfile}"
# Reject multiple slashes
if [[ -n ${x} ]]; then
error_in_gosum=1
gosum_errorlines+=( "Bad version: ${version_modfile}" )
continue
fi
# The modfile variable should be either empty or '/go.mod'
# There is a chance that upstream Go might add something else here in
# the future, and we should be prepared to capture it.
# The .info files do not need to be downloaded, they will be created
# based on the .mod file.
# See https://github.com/golang/go/issues/35922#issuecomment-584824275
exts=()
local errormsg=''
case "${modfile}" in
'') exts=( zip ) ;;
'go.mod'|'/go.mod') exts=( mod ) ;;
*) errormsg="Unknown modfile: line='${line}', modfile='${modfile}'" ;;
esac
# If it was a bad entry, restart the loop
if [[ -n ${errormsg} ]]; then
error_in_gosum=1
gosum_errorlines+=( "${errormsg} line='${line}', modfile='${modfile}'" )
continue
fi
_dir=$(_go-module_gomod_encode "${module}")
for _ext in "${exts[@]}" ; do
# Relative URI within a GOPROXY for a file
_reluri="${_dir}/@v/${version}.${_ext}"
# SRC_URI: LHS entry
_uri="${_GOMODULE_GOPROXY_BASEURI}/${_reluri}"
# _uri="mirror://goproxy/${_reluri}"
# SRC_URI: RHS entry, encode any slash in the path as
# %2F in the filename
_distfile="${_reluri//\//%2F}"
EGO_SUM_SRC_URI+=" ${_uri} -> ${_distfile}${newline}"
_GOMODULE_GOSUM_REVERSE_MAP["${_distfile}"]="${_reluri}"
done
done
if [[ ${error_in_gosum} != 0 ]]; then
eerror "Trailing information in EGO_SUM in ${P}.ebuild"
for line in "${gosum_errorlines[@]}" ; do
eerror "${line}"
done
die "Invalid EGO_SUM format"
fi
# Ensure these variables are not changed past this point
readonly EGO_SUM
readonly EGO_SUM_SRC_URI
readonly _GOMODULE_GOSUM_REVERSE_MAP
# Set the guard that we are safe
_GO_MODULE_SET_GLOBALS_CALLED=1
}
# @FUNCTION: go-module_src_unpack
# @DESCRIPTION:
# If EGO_SUM is set, unpack the base tarball(s) and set up the
# local go proxy.
# - Otherwise, if EGO_VENDOR is set, bail out.
# - Otherwise do a normal unpack.
go-module_src_unpack() {
if [[ "${#EGO_SUM[@]}" -gt 0 ]]; then
_go-module_src_unpack_gosum
elif [[ "${#EGO_VENDOR[@]}" -gt 0 ]]; then
eerror "${EBUILD} is using EGO_VENDOR which is no longer supported"
die "Please update this ebuild"
else
default
fi
}
# @FUNCTION: _go-module_src_unpack_gosum
# @DESCRIPTION:
# Populate a GOPROXY directory hierarchy with distfiles from EGO_SUM and
# unpack the base distfiles.
#
# Exports GOPROXY environment variable so that Go calls will source the
# directory correctly.
_go-module_src_unpack_gosum() {
# shellcheck disable=SC2120
debug-print-function "${FUNCNAME}" "$@"
if [[ ! ${_GO_MODULE_SET_GLOBALS_CALLED} ]]; then
die "go-module_set_globals must be called in global scope"
fi
local goproxy_dir="${T}/go-proxy"
mkdir -p "${goproxy_dir}" || die
# For each Golang module distfile, look up where it's supposed to go, and
# symlink into place.
local f
local goproxy_mod_dir
for f in ${A}; do
goproxy_mod_path="${_GOMODULE_GOSUM_REVERSE_MAP["${f}"]}"
if [[ -n "${goproxy_mod_path}" ]]; then
debug-print-function "Populating go proxy for ${goproxy_mod_path}"
# Build symlink hierarchy
goproxy_mod_dir=$( dirname "${goproxy_dir}"/"${goproxy_mod_path}" )
mkdir -p "${goproxy_mod_dir}" || die
ln -sf "${DISTDIR}"/"${f}" "${goproxy_dir}/${goproxy_mod_path}" ||
die "Failed to ln"
local v=${goproxy_mod_path}
v="${v%.mod}"
v="${v%.zip}"
v="${v//*\/}"
_go-module_gosum_synthesize_files "${goproxy_mod_dir}" "${v}"
else
unpack "$f"
fi
done
export GOPROXY="file://${goproxy_dir}"
# Validate the gosum now
_go-module_src_unpack_verify_gosum
}
# @FUNCTION: _go-module_gosum_synthesize_files
# @DESCRIPTION:
# Given a path & version, populate all Goproxy metadata files which aren't
# needed to be downloaded directly.
# - .../@v/${version}.info
# - .../@v/list
_go-module_gosum_synthesize_files() {
local target=$1
local version=$2
# 'go get' doesn't care about the hash of the .info files, they
# just need a 'version' element!
# This saves a download of a tiny file
# The .time key is omitted, because that is the time a module was added
# to the upstream goproxy, and not metadata about the module itself.
cat >"${target}/${version}.info" <<-EOF
{
"Version": "${version}",
"shortName": "${version}",
"Name": "${version}"
}
EOF
listfile="${target}"/list
if ! grep -sq -x -e "${version}" "${listfile}" 2>/dev/null; then
echo "${version}" >>"${listfile}"
fi
}
# @FUNCTION: _go-module_src_unpack_verify_gosum
# @DESCRIPTION:
# Validate the Go modules declared by EGO_SUM are sufficient to cover building
# the package, without actually building it yet.
_go-module_src_unpack_verify_gosum() {
# shellcheck disable=SC2120
debug-print-function "${FUNCNAME}" "$@"
if [[ ! ${_GO_MODULE_SET_GLOBALS_CALLED} ]]; then
die "go-module_set_globals must be called in global scope"
fi
cd "${S}" || die "cd failed"
# Cleanup the modules before starting anything else
# This will print 'downloading' messages, but it's accessing content from
# the $GOPROXY file:/// URL!
einfo "Tidying go.mod/go.sum"
go mod tidy >/dev/null
# This used to call 'go get' to verify by fetching everything from the main
# go.mod. However 'go get' also turns out to recursively try to fetch
# everything in dependencies, even materials that are used only for tests
# of the dependencies, or code generation.
# If EGO_SUM is missing an entry now, it will fail during the build process
# rather than this helper function.
}
# @FUNCTION: go-module_live_vendor
# @DESCRIPTION:
# This function is used in live ebuilds to vendor the dependencies when
# upstream doesn't vendor them.
go-module_live_vendor() {
debug-print-function "${FUNCNAME}" "$@"
# shellcheck disable=SC2086
has live ${PROPERTIES} ||
die "${FUNCNAME} only allowed in live ebuilds"
[[ "${EBUILD_PHASE}" == unpack ]] ||
die "${FUNCNAME} only allowed in src_unpack"
[[ -d "${S}"/vendor ]] &&
die "${FUNCNAME} only allowed when upstream isn't vendoring"
pushd "${S}" >& /dev/null || die
go mod vendor || die
popd >& /dev/null || die
}
# @FUNCTION: go-module_pkg_postinst
# @DESCRIPTION:
# Display a warning about security updates for Go programs.
go-module_pkg_postinst() {
debug-print-function "${FUNCNAME}" "$@"
[[ -n ${REPLACING_VERSIONS} ]] && return 0
ewarn "${PN} is written in the Go programming language."
ewarn "Since this language is statically linked, security"
ewarn "updates will be handled in individual packages and will be"
ewarn "difficult for us to track as a distribution."
ewarn "For this reason, please update any go packages asap when new"
ewarn "versions enter the tree or go stable if you are running the"
ewarn "stable tree."
}
# @FUNCTION: _go-module_gomod_encode
# @DESCRIPTION:
# Encode the name(path) of a Golang module in the format expected by Goproxy.
#
# Upper letters are replaced by their lowercase version with a '!' prefix.
#
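# For example, "github.com/Azure/azure-sdk-for-go" is encoded as
# "github.com/!azure/azure-sdk-for-go".
#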
_go-module_gomod_encode() {
## Python:
# return re.sub('([A-Z]{1})', r'!\1', s).lower()
## Sed:
## This uses GNU Sed extension \l to downcase the match
#echo "${module}" |sed 's,[A-Z],!\l&,g'
#
# Bash variant:
debug-print-function "${FUNCNAME}" "$@"
#local re input lower
re='(.*)([A-Z])(.*)'
input="${1}"
while [[ ${input} =~ ${re} ]]; do
lower='!'"${BASH_REMATCH[2],}"
input="${BASH_REMATCH[1]}${lower}${BASH_REMATCH[3]}"
done
echo "${input}"
}
fi

88
eclass/golang-base.eclass Normal file
View File

@ -0,0 +1,88 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: golang-base.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Eclass that provides base functions for Go packages.
# @DESCRIPTION:
# This eclass provides base functions for software written in the Go
# programming language; it also provides the build-time dependency on
# dev-lang/go.
case "${EAPI:-0}" in
5|6|7)
;;
*)
die "${ECLASS}: Unsupported eapi (EAPI=${EAPI})"
;;
esac
if [[ -z ${_GOLANG_BASE} ]]; then
_GOLANG_BASE=1
GO_DEPEND=">=dev-lang/go-1.10"
if [[ ${EAPI:-0} == [56] ]]; then
DEPEND="${GO_DEPEND}"
else
BDEPEND="${GO_DEPEND}"
fi
# Do not complain about CFLAGS etc since go projects do not use them.
QA_FLAGS_IGNORED='.*'
# Upstream does not support stripping go packages
RESTRICT="strip"
# @ECLASS-VARIABLE: EGO_PN
# @REQUIRED
# @DESCRIPTION:
# This is the import path for the go package to build. Please emerge
# dev-lang/go and read "go help importpath" for syntax.
#
# Example:
# @CODE
# EGO_PN=github.com/user/package
# @CODE
# @FUNCTION: ego_pn_check
# @DESCRIPTION:
# Make sure EGO_PN has a value.
ego_pn_check() {
[[ -z "${EGO_PN}" ]] &&
die "${ECLASS}.eclass: EGO_PN is not set"
return 0
}
# @FUNCTION: get_golibdir
# @DESCRIPTION:
# Return the non-prefixed library directory where Go packages
# should be installed
get_golibdir() {
echo /usr/lib/go-gentoo
}
# @FUNCTION: get_golibdir_gopath
# @DESCRIPTION:
# Return the library directory where Go packages should be installed
# This is the prefixed version which should be included in GOPATH
get_golibdir_gopath() {
echo "${EPREFIX}$(get_golibdir)"
}
# @FUNCTION: golang_install_pkgs
# @DESCRIPTION:
# Install Go packages.
# This function assumes that $cwd is a Go workspace.
golang_install_pkgs() {
debug-print-function ${FUNCNAME} "$@"
ego_pn_check
insinto "$(get_golibdir)"
insopts -m0644 -p # preserve timestamps for bug 551486
doins -r pkg src
}
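# Example (illustrative sketch): a custom src_install could reuse it after
# building, assuming the current directory is the Go workspace:
#
# @CODE
# src_install() {
# 	golang_install_pkgs
# }
# @CODE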
fi

85
eclass/golang-build.eclass Normal file
View File

@ -0,0 +1,85 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: golang-build.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Eclass for compiling go packages.
# @DESCRIPTION:
# This eclass provides default src_compile, src_test and src_install
# functions for software written in the Go programming language.
inherit golang-base
case "${EAPI:-0}" in
5|6|7)
;;
*)
die "${ECLASS}: Unsupported eapi (EAPI=${EAPI})"
;;
esac
EXPORT_FUNCTIONS src_compile src_install src_test
if [[ -z ${_GOLANG_BUILD} ]]; then
_GOLANG_BUILD=1
# @ECLASS-VARIABLE: EGO_BUILD_FLAGS
# @DEFAULT_UNSET
# @DESCRIPTION:
# This allows you to pass build flags to the Go compiler. These flags
# are common to the "go build" and "go install" commands used below.
# Please emerge dev-lang/go and run "go help build" for the
# documentation for these flags.
#
# Example:
# @CODE
# EGO_BUILD_FLAGS="-ldflags \"-X main.version ${PV}\""
# @CODE
# @ECLASS-VARIABLE: EGO_PN
# @REQUIRED
# @DESCRIPTION:
# This is the import path for the go package(s) to build. Please emerge
# dev-lang/go and read "go help importpath" for syntax.
#
# Example:
# @CODE
# EGO_PN=github.com/user/package
# @CODE
golang-build_src_compile() {
debug-print-function ${FUNCNAME} "$@"
ego_pn_check
set -- env GOPATH="${WORKDIR}/${P}:$(get_golibdir_gopath)" \
GOCACHE="${T}/go-cache" \
go build -v -work -x ${EGO_BUILD_FLAGS} "${EGO_PN}"
echo "$@"
"$@" || die
}
golang-build_src_install() {
debug-print-function ${FUNCNAME} "$@"
ego_pn_check
set -- env GOPATH="${WORKDIR}/${P}:$(get_golibdir_gopath)" \
go install -v -work -x ${EGO_BUILD_FLAGS} "${EGO_PN}"
echo "$@"
"$@" || die
golang_install_pkgs
}
golang-build_src_test() {
debug-print-function ${FUNCNAME} "$@"
ego_pn_check
set -- env GOPATH="${WORKDIR}/${P}:$(get_golibdir_gopath)" \
go test -v -work -x "${EGO_PN}"
echo "$@"
"$@" || die
}
fi

118
eclass/golang-vcs-snapshot.eclass Normal file
View File

@ -0,0 +1,118 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: golang-vcs-snapshot.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: support eclass for unpacking VCS snapshot tarballs for
# software written in the Go programming language
# @DESCRIPTION:
# This eclass provides a convenience src_unpack() which unpacks the
# first tarball mentioned in SRC_URI to its appropriate location in
# ${WORKDIR}/${P}, treating ${WORKDIR}/${P} as a go workspace.
# Also, it provides a downstream method of vendoring packages.
#
# The location where the tarball is extracted is defined as
# ${WORKDIR}/${P}/src/${EGO_PN}. The location of vendored packages is
# defined as ${WORKDIR}/${P}/src/${EGO_PN%/*}/vendor to match Go's
# vendoring setup.
#
# The typical use case is VCS snapshots coming from github, bitbucket
# and similar services.
#
# Please note that this eclass currently handles only tarballs
# (.tar.gz), but support for more formats may be added in the future.
#
# @EXAMPLE:
#
# @CODE
# EGO_PN=github.com/user/package
# EGO_VENDOR=(
# "github.com/xenolf/lego 6cac0ea7d8b28c889f709ec7fa92e92b82f490dd"
# "golang.org/x/crypto 453249f01cfeb54c3d549ddb75ff152ca243f9d8 github.com/golang/crypto"
# )
#
# inherit golang-vcs-snapshot
#
# SRC_URI="https://github.com/example/${PN}/tarball/v${PV} -> ${P}.tar.gz
# ${EGO_VENDOR_URI}"
# @CODE
#
# The above example will extract the tarball to
# ${WORKDIR}/${P}/src/github.com/user/package
# and add the vendored tarballs to ${WORKDIR}/${P}/src/${EGO_PN}/vendor
inherit golang-base
case ${EAPI:-0} in
5|6|7) ;;
*) die "${ECLASS} API in EAPI ${EAPI} not yet established."
esac
EXPORT_FUNCTIONS src_unpack
# @ECLASS-VARIABLE: EGO_VENDOR
# @DESCRIPTION:
# This variable contains a list of vendored packages.
# The items of this array are strings that contain the
# import path and the git commit hash for a vendored package.
# If the import path does not start with github.com, the third argument
# can be used to point to a github repository.
declare -arg EGO_VENDOR
_golang-vcs-snapshot_set_vendor_uri() {
EGO_VENDOR_URI=
local lib
for lib in "${EGO_VENDOR[@]}"; do
lib=(${lib})
if [[ -n ${lib[2]} ]]; then
EGO_VENDOR_URI+=" https://${lib[2]}/archive/${lib[1]}.tar.gz -> ${lib[2]//\//-}-${lib[1]}.tar.gz"
else
EGO_VENDOR_URI+=" https://${lib[0]}/archive/${lib[1]}.tar.gz -> ${lib[0]//\//-}-${lib[1]}.tar.gz"
fi
done
readonly EGO_VENDOR_URI
}
_golang-vcs-snapshot_set_vendor_uri
unset -f _golang-vcs-snapshot_set_vendor_uri
_golang-vcs-snapshot_dovendor() {
local VENDOR_PATH=$1 VENDORPN=$2 TARBALL=$3
rm -fr "${VENDOR_PATH}/${VENDORPN}" || die
mkdir -p "${VENDOR_PATH}/${VENDORPN}" || die
tar -C "${VENDOR_PATH}/${VENDORPN}" -x --strip-components 1\
-f "${DISTDIR}"/${TARBALL} || die
}
# @FUNCTION: golang-vcs-snapshot_src_unpack
# @DESCRIPTION:
# Extract the first archive from ${A} to the appropriate location for GOPATH.
golang-vcs-snapshot_src_unpack() {
local lib vendor_path x
ego_pn_check
set -- ${A}
x="$1"
mkdir -p "${WORKDIR}/${P}/src/${EGO_PN%/...}" || die
tar -C "${WORKDIR}/${P}/src/${EGO_PN%/...}" -x --strip-components 1 \
-f "${DISTDIR}/${x}" || die
if [[ -n "${EGO_VENDOR}" ]]; then
vendor_path="${WORKDIR}/${P}/src/${EGO_PN%/...}/vendor"
mkdir -p "${vendor_path}" || die
for lib in "${EGO_VENDOR[@]}"; do
lib=(${lib})
if [[ -n ${lib[2]} ]]; then
einfo "Vendoring ${lib[0]} ${lib[2]//\//-}-${lib[1]}.tar.gz"
_golang-vcs-snapshot_dovendor "${vendor_path}" ${lib[0]} \
${lib[2]//\//-}-${lib[1]}.tar.gz
else
einfo "Vendoring ${lib[0]} ${lib[0]//\//-}-${lib[1]}.tar.gz"
_golang-vcs-snapshot_dovendor "${vendor_path}" ${lib[0]} \
${lib[0]//\//-}-${lib[1]}.tar.gz
fi
done
fi
}

138
eclass/golang-vcs.eclass Normal file
View File

@ -0,0 +1,138 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: golang-vcs.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# @SUPPORTED_EAPIS: 5 6 7
# @BLURB: Eclass for fetching and unpacking go repositories.
# @DESCRIPTION:
# This eclass is written to ease the maintenance of live ebuilds
# of software written in the Go programming language.
inherit estack eutils golang-base
case "${EAPI:-0}" in
5|6|7)
;;
*)
die "${ECLASS}: Unsupported eapi (EAPI=${EAPI})"
;;
esac
EXPORT_FUNCTIONS src_unpack
if [[ -z ${_GOLANG_VCS} ]]; then
_GOLANG_VCS=1
PROPERTIES+=" live"
# @ECLASS-VARIABLE: EGO_PN
# @REQUIRED
# @DESCRIPTION:
# This is the import path for the go package(s). Please emerge dev-lang/go
# and read "go help importpath" for syntax.
#
# Example:
# @CODE
# EGO_PN="github.com/user/package"
# EGO_PN="github.com/user1/package1 github.com/user2/package2"
# @CODE
# @ECLASS-VARIABLE: EGO_STORE_DIR
# @DESCRIPTION:
# Storage directory for Go sources.
#
# This is intended to be set by the user in make.conf. Ebuilds must not set
# it.
#
# EGO_STORE_DIR=${DISTDIR}/go-src
# @ECLASS-VARIABLE: EVCS_OFFLINE
# @DEFAULT_UNSET
# @DESCRIPTION:
# If non-empty, this variable prevents any online operations.
# @ECLASS-VARIABLE: EVCS_UMASK
# @DEFAULT_UNSET
# @DESCRIPTION:
# Set this variable to a custom umask. This is intended to be set by
# users. By setting this to something like 002, it can make life easier
# for people who do development as non-root (but are in the portage
# group) and use FEATURES=userpriv.
# @FUNCTION: _golang-vcs_env_setup
# @INTERNAL
# @DESCRIPTION:
# Create EGO_STORE_DIR if necessary.
_golang-vcs_env_setup() {
debug-print-function ${FUNCNAME} "$@"
local distdir=${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}
: ${EGO_STORE_DIR:=${distdir}/go-src}
[[ -n ${EVCS_UMASK} ]] && eumask_push $EVCS_UMASK
if [[ ! -d ${EGO_STORE_DIR} ]]; then
(
addwrite /
mkdir -p "${EGO_STORE_DIR}"
) || die "${ECLASS}: unable to create ${EGO_STORE_DIR}"
fi
addwrite "${EGO_STORE_DIR}"
[[ -n ${EVCS_UMASK} ]] && eumask_pop
mkdir -p "${WORKDIR}/${P}/src" ||
die "${ECLASS}: unable to create ${WORKDIR}/${P}"
return 0
}
# @FUNCTION: _golang-vcs_fetch
# @INTERNAL
# @DESCRIPTION:
# Retrieve the EGO_PN go package along with its dependencies.
_golang-vcs_fetch() {
debug-print-function ${FUNCNAME} "$@"
ego_pn_check
if [[ -z ${EVCS_OFFLINE} ]]; then
[[ -n ${EVCS_UMASK} ]] && eumask_push ${EVCS_UMASK}
set -- env GOPATH="${EGO_STORE_DIR}" go get -d -t -u -v -x "${EGO_PN}"
echo "$@"
"$@" || die
# The above dies if you pass repositories in EGO_PN instead of
# packages, e.g. golang.org/x/tools instead of golang.org/x/tools/cmd/vet.
# This is being discussed in the following upstream issue:
# https://github.com/golang/go/issues/11090
[[ -n ${EVCS_UMASK} ]] && eumask_pop
fi
local go_srcpath="${WORKDIR}/${P}/src/${EGO_PN%/...}"
set -- mkdir -p "${go_srcpath}"
echo "$@"
"$@" || die "Unable to create ${go_srcpath}"
set -- cp -r "${EGO_STORE_DIR}/src/${EGO_PN%/...}" \
"${go_srcpath}/.."
echo "$@"
"$@" || die "Unable to copy sources to ${go_srcpath}"
return 0
}
golang-vcs_src_fetch() {
debug-print-function ${FUNCNAME} "$@"
_golang-vcs_env_setup
_golang-vcs_fetch
}
golang-vcs_src_unpack() {
debug-print-function ${FUNCNAME} "$@"
golang-vcs_src_fetch
}
fi

268
eclass/gstreamer.eclass Normal file
View File

@ -0,0 +1,268 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: gstreamer.eclass
# @MAINTAINER:
# gstreamer@gentoo.org
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
# Gilles Dartiguelongue <eva@gentoo.org>
# Saleem Abdulrasool <compnerd@gentoo.org>
# foser <foser@gentoo.org>
# zaheerm <zaheerm@gentoo.org>
# @SUPPORTED_EAPIS: 5 6
# @BLURB: Helps building core & split gstreamer plugins.
# @DESCRIPTION:
# Eclass to make external gst-plugins emergable on a per-plugin basis
# and to solve the problem of gst-plugins generating far too many
# unneeded dependencies.
#
# GStreamer consuming applications should depend on the specific plugins
# they need as defined in their source code. Usually you can find that
# out by grepping the source tree for 'factory_make'. If it uses the playbin
# plugin, consider adding a media-plugins/gst-plugins-meta dependency, but
# also list any packages that provide explicitly requested plugins.
inherit eutils ltprune multilib multilib-minimal toolchain-funcs versionator xdg-utils
case "${EAPI:-0}" in
5|6)
;;
0|1|2|3|4)
die "EAPI=\"${EAPI:-0}\" is not supported anymore"
;;
*)
die "EAPI=\"${EAPI}\" is not supported yet"
;;
esac
# @ECLASS-VARIABLE: GST_PLUGINS_BUILD
# @DESCRIPTION:
# Defines the plugins to be built.
# May be set by an ebuild and contain more than one identifier, space
# separated (only src_configure can handle multiple plugins at this time).
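#
# A hedged example (hypothetical ebuild snippet; the plugin names are purely
# illustrative):
# @CODE
# GST_PLUGINS_BUILD="ogg vorbis"
# @CODE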
: ${GST_PLUGINS_BUILD:=${PN/gst-plugins-/}}
# @ECLASS-VARIABLE: GST_PLUGINS_BUILD_DIR
# @DESCRIPTION:
# Actual build directory of the plugin.
# Most often the same as the configure switch name.
: ${GST_PLUGINS_BUILD_DIR:=${PN/gst-plugins-/}}
# @ECLASS-VARIABLE: GST_TARBALL_SUFFIX
# @DESCRIPTION:
# Most projects hosted on gstreamer.freedesktop.org mirrors provide
# tarballs as tar.bz2 or tar.xz. This eclass defaults to xz. This is
# because the gstreamer mirrors are moving to only have xz tarballs for
# new releases.
: ${GST_TARBALL_SUFFIX:="xz"}
# Even though xz-utils are in @system, they must still be added to DEPEND; see
# https://archives.gentoo.org/gentoo-dev/msg_a0d4833eb314d1be5d5802a3b710e0a4.xml
if [[ ${GST_TARBALL_SUFFIX} == "xz" ]]; then
DEPEND="${DEPEND} app-arch/xz-utils"
fi
# @ECLASS-VARIABLE: GST_ORG_MODULE
# @DESCRIPTION:
# Name of the module as hosted on gstreamer.freedesktop.org mirrors.
# Leave unset if package name matches module name.
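#
# For a split plugin ebuild this is typically the upstream module that ships
# the plugin, e.g. (hypothetical):
# @CODE
# GST_ORG_MODULE="gst-plugins-bad"
# @CODE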
: ${GST_ORG_MODULE:=$PN}
# @ECLASS-VARIABLE: GST_ORG_PVP
# @INTERNAL
# @DESCRIPTION:
# Major and minor numbers of the version number.
: ${GST_ORG_PVP:=$(get_version_component_range 1-2)}
DESCRIPTION="${BUILD_GST_PLUGINS} plugin for gstreamer"
HOMEPAGE="https://gstreamer.freedesktop.org/"
SRC_URI="https://gstreamer.freedesktop.org/src/${GST_ORG_MODULE}/${GST_ORG_MODULE}-${PV}.tar.${GST_TARBALL_SUFFIX}"
LICENSE="GPL-2"
case ${GST_ORG_PVP} in
0.10) SLOT="0.10"; GST_MIN_PV="0.10.36-r2" ;;
1.*) SLOT="1.0"; GST_MIN_PV="1.2.4-r1" ;;
*) die "Unkown gstreamer release."
esac
S="${WORKDIR}/${GST_ORG_MODULE}-${PV}"
RDEPEND="
>=dev-libs/glib-2.38.2-r1:2[${MULTILIB_USEDEP}]
>=media-libs/gstreamer-${GST_MIN_PV}:${SLOT}[${MULTILIB_USEDEP}]
"
DEPEND="
>=sys-apps/sed-4
virtual/pkgconfig
"
# Export common multilib phases.
multilib_src_configure() { gstreamer_multilib_src_configure; }
if [[ ${PN} != ${GST_ORG_MODULE} ]]; then
# Do not run the test phase for individual plugin ebuilds.
RESTRICT="test"
RDEPEND="${RDEPEND}
>=media-libs/${GST_ORG_MODULE}-${PV}:${SLOT}[${MULTILIB_USEDEP}]"
# Export multilib phases used for split builds.
multilib_src_compile() { gstreamer_multilib_src_compile; }
multilib_src_install() { gstreamer_multilib_src_install; }
multilib_src_install_all() { gstreamer_multilib_src_install_all; }
else
IUSE="nls"
DEPEND="${DEPEND} nls? ( >=sys-devel/gettext-0.17 )"
fi
DEPEND="${DEPEND} ${RDEPEND}"
# @FUNCTION: gstreamer_environment_reset
# @INTERNAL
# @DESCRIPTION:
# Clean up environment for clean builds.
# >=dev-lang/orc-0.4.23 relies on environment variables to find a place to
# allocate files to mmap.
gstreamer_environment_reset() {
xdg_environment_reset
}
# @FUNCTION: gstreamer_get_plugins
# @INTERNAL
# @DESCRIPTION:
# Get the list of plugins requiring external dependencies.
gstreamer_get_plugins() {
# Must be called from src_prepare/src_configure
GST_PLUGINS_LIST=$(sed -rn 's/^AG_GST_CHECK_FEATURE\((\w+),.*/ \1 /p' \
"${ECONF_SOURCE:-${S}}"/configure.* | LC_ALL='C' tr '[:upper:]' '[:lower:]')
}
# @FUNCTION: gstreamer_get_plugin_dir
# @USAGE: [build_dir]
# @INTERNAL
# @DESCRIPTION:
# Finds the plugin build directory and outputs it.
# Defaults to ${GST_PLUGINS_BUILD_DIR} if no argument is provided.
gstreamer_get_plugin_dir() {
local build_dir=${1:-${GST_PLUGINS_BUILD_DIR}}
if [[ ! -d ${S}/ext/${build_dir} ]]; then
if [[ ! -d ${S}/sys/${build_dir} ]]; then
ewarn "No such plugin directory"
die
fi
einfo "Building system plugin in ${build_dir}..." >&2
echo sys/${build_dir}
else
einfo "Building external plugin in ${build_dir}..." >&2
echo ext/${build_dir}
fi
}
# @FUNCTION: gstreamer_system_link
# @USAGE: <gst-libs/gst/audio:gstreamer-audio> [...]
# @DESCRIPTION:
# Walks through makefiles in order to make sure the build will link against
# system libraries.
# Takes a list of path fragments and corresponding pkg-config libraries
# separated by a colon (:). The path fragment will be replaced by the output
# of pkg-config.
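#
# For instance (hypothetical call from src_prepare; the path fragment and
# pkg-config name are taken from the @USAGE line above):
# @CODE
# gstreamer_system_link gst-libs/gst/audio:gstreamer-audio
# @CODE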
gstreamer_system_link() {
local pdir directory libs pkgconfig pc tuple
pkgconfig=$(tc-getPKG_CONFIG)
for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
pdir=$(gstreamer_get_plugin_dir ${plugin_dir})
for tuple in $@ ; do
directory=${tuple%:*}
pc=${tuple#*:}-${SLOT}
libs="$(${pkgconfig} --libs-only-l ${pc} || die)"
sed -e "s:\$(top_builddir)/${directory}/.*\.la:${libs}:" \
-i "${pdir}"/Makefile.{am,in} || die
done
done
}
# @FUNCTION: gstreamer_multilib_src_configure
# @DESCRIPTION:
# Handles logic common to configuring gstreamer plugins
gstreamer_multilib_src_configure() {
local plugin gst_conf=() ECONF_SOURCE=${ECONF_SOURCE:-${S}}
gstreamer_get_plugins
gstreamer_environment_reset
for plugin in ${GST_PLUGINS_LIST} ; do
if has ${plugin} ${GST_PLUGINS_BUILD} ; then
gst_conf+=( --enable-${plugin} )
else
gst_conf+=( --disable-${plugin} )
fi
done
if grep -q "ORC_CHECK" "${ECONF_SOURCE}"/configure.* ; then
if in_iuse orc ; then
gst_conf+=( $(use_enable orc) )
else
gst_conf+=( --disable-orc )
fi
fi
if grep -q "AM_MAINTAINER_MODE" "${ECONF_SOURCE}"/configure.* ; then
gst_conf+=( --disable-maintainer-mode )
fi
if grep -q "disable-schemas-compile" "${ECONF_SOURCE}"/configure ; then
gst_conf+=( --disable-schemas-compile )
fi
if [[ ${PN} == ${GST_ORG_MODULE} ]]; then
gst_conf+=( $(use_enable nls) )
fi
einfo "Configuring to build ${GST_PLUGINS_BUILD} plugin(s) ..."
econf \
--with-package-name="Gentoo GStreamer ebuild" \
--with-package-origin="https://www.gentoo.org" \
"${gst_conf[@]}" "${@}"
}
# @FUNCTION: gstreamer_multilib_src_compile
# @DESCRIPTION:
# Compiles requested gstreamer plugin.
gstreamer_multilib_src_compile() {
local plugin_dir
for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
emake -C "$(gstreamer_get_plugin_dir ${plugin_dir})"
done
}
# @FUNCTION: gstreamer_multilib_src_install
# @DESCRIPTION:
# Installs requested gstreamer plugin.
gstreamer_multilib_src_install() {
local plugin_dir
for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
emake -C "$(gstreamer_get_plugin_dir ${plugin_dir})" \
DESTDIR="${D}" install
done
}
# @FUNCTION: gstreamer_multilib_src_install_all
# @DESCRIPTION:
# Installs documentation for requested gstreamer plugin, and removes .la
# files.
gstreamer_multilib_src_install_all() {
local plugin_dir
for plugin_dir in ${GST_PLUGINS_BUILD_DIR} ; do
local dir=$(gstreamer_get_plugin_dir ${plugin_dir})
[[ -e ${dir}/README ]] && dodoc "${dir}"/README
done
prune_libtool_files --modules
}

771
eclass/haskell-cabal.eclass Normal file
View File

@ -0,0 +1,771 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: haskell-cabal.eclass
# @MAINTAINER:
# Haskell herd <haskell@gentoo.org>
# @AUTHOR:
# Original author: Andres Loeh <kosmikus@gentoo.org>
# Original author: Duncan Coutts <dcoutts@gentoo.org>
# @BLURB: for packages that make use of the Haskell Common Architecture for Building Applications and Libraries (cabal)
# @DESCRIPTION:
# Basic instructions:
#
# Before inheriting the eclass, set CABAL_FEATURES to
# reflect the tools and features that the package makes
# use of.
#
# Currently supported features:
# haddock -- for documentation generation
# hscolour -- generation of colourised sources
# hoogle -- generation of documentation search index
# profile -- if the package supports building profiling-enabled libraries
# bootstrap -- only used for the cabal package itself
# lib -- the package installs libraries
# nocabaldep -- don't add dependency on cabal.
# only used for packages that _must_ not pull the dependency
# on cabal, but still use this eclass (e.g. haskell-updater).
# ghcdeps -- constrain dependencies on packages to the ones provided by ghc;
# only used for packages that use libghc internally and _must_
# not pull in newer versions
# test-suite -- add support for cabal test-suites (introduced in Cabal-1.8)
# rebuild-after-doc-workaround -- enable doctest test failure workaround.
# Symptom: when `./setup haddock` is run in a `build-type: Custom`
# package it might cause the test-suite to fail with
# errors like:
# > <command line>: cannot satisfy -package-id singletons-2.7-3Z7pnljD8tU1NrslJodXmr
# Workaround re-registers the package to avoid the failure
# (and rebuilds changes).
# FEATURE can be removed once https://github.com/haskell/cabal/issues/7213
# is fixed.
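#
# A minimal sketch of the intended usage (hypothetical ebuild snippet; the
# feature names come from the list above):
# @CODE
# CABAL_FEATURES="lib profile haddock hscolour"
# inherit haskell-cabal
# @CODE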
inherit eutils ghc-package multilib toolchain-funcs
# @ECLASS-VARIABLE: CABAL_EXTRA_CONFIGURE_FLAGS
# @DESCRIPTION:
# User-specified additional parameters passed to 'setup configure'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_CONFIGURE_FLAGS="--enable-shared --enable-executable-dynamic"
: ${CABAL_EXTRA_CONFIGURE_FLAGS:=}
# @ECLASS-VARIABLE: CABAL_EXTRA_BUILD_FLAGS
# @DESCRIPTION:
# User-specified additional parameters passed to 'setup build'.
# example: /etc/portage/make.conf: CABAL_EXTRA_BUILD_FLAGS=-v
: ${CABAL_EXTRA_BUILD_FLAGS:=}
# @ECLASS-VARIABLE: GHC_BOOTSTRAP_FLAGS
# @DESCRIPTION:
# User-specified additional parameters for ghc when building
# _only_ 'setup' binary bootstrap.
# example: /etc/portage/make.conf: GHC_BOOTSTRAP_FLAGS=-dynamic to make
# linking 'setup' faster.
: ${GHC_BOOTSTRAP_FLAGS:=}
# @ECLASS-VARIABLE: CABAL_EXTRA_HADDOCK_FLAGS
# @DESCRIPTION:
# User-specified additional parameters passed to 'setup haddock'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_HADDOCK_FLAGS="--haddock-options=--latex --haddock-options=--pretty-html"
: ${CABAL_EXTRA_HADDOCK_FLAGS:=}
# @ECLASS-VARIABLE: CABAL_EXTRA_HOOGLE_FLAGS
# @DESCRIPTION:
# User-specified additional parameters passed to 'setup haddock --hoogle'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_HOOGLE_FLAGS="--haddock-options=--show-all"
: ${CABAL_EXTRA_HOOGLE_FLAGS:=}
# @ECLASS-VARIABLE: CABAL_EXTRA_HSCOLOUR_FLAGS
# @DESCRIPTION:
# User-specified additional parameters passed to 'setup hscolour'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_HSCOLOUR_FLAGS="--executables --tests"
: ${CABAL_EXTRA_HSCOLOUR_FLAGS:=}
# @ECLASS-VARIABLE: CABAL_EXTRA_TEST_FLAGS
# @DESCRIPTION:
# User-specified additional parameters passed to 'setup test'.
# example: /etc/portage/make.conf:
# CABAL_EXTRA_TEST_FLAGS="-v3 --show-details=streaming"
: ${CABAL_EXTRA_TEST_FLAGS:=}
# @ECLASS-VARIABLE: CABAL_DEBUG_LOOSENING
# @DESCRIPTION:
# Show debug output for 'cabal_chdeps' function if set.
# Needs working 'diff'.
: ${CABAL_DEBUG_LOOSENING:=}
# @ECLASS-VARIABLE: CABAL_REPORT_OTHER_BROKEN_PACKAGES
# @DESCRIPTION:
# Show other broken packages if 'cabal configure' fails.
# It should be normally enabled unless you know you are about
# to try to compile a lot of broken packages. Default value: 'yes'
# Set to anything else to disable.
: ${CABAL_REPORT_OTHER_BROKEN_PACKAGES:=yes}
HASKELL_CABAL_EXPF="pkg_setup src_compile src_test src_install pkg_postinst pkg_postrm"
# 'dev-haskell/cabal' passes those options with ./configure-based
# configuration, but most packages don't need or accept them:
# #515362, #515362
QA_CONFIGURE_OPTIONS+=" --with-compiler --with-hc --with-hc-pkg --with-gcc"
case "${EAPI:-0}" in
2|3|4|5|6|7) HASKELL_CABAL_EXPF+=" src_configure" ;;
*) ;;
esac
EXPORT_FUNCTIONS ${HASKELL_CABAL_EXPF}
for feature in ${CABAL_FEATURES}; do
case ${feature} in
haddock) CABAL_USE_HADDOCK=yes;;
hscolour) CABAL_USE_HSCOLOUR=yes;;
hoogle) CABAL_USE_HOOGLE=yes;;
profile) CABAL_USE_PROFILE=yes;;
bootstrap) CABAL_BOOTSTRAP=yes;;
lib) CABAL_HAS_LIBRARIES=yes;;
nocabaldep) CABAL_FROM_GHC=yes;;
ghcdeps) CABAL_GHC_CONSTRAINT=yes;;
test-suite) CABAL_TEST_SUITE=yes;;
rebuild-after-doc-workaround) CABAL_REBUILD_AFTER_DOC_WORKAROUND=yes;;
# does nothing, removed 2016-09-04
bin) ;;
*) CABAL_UNKNOWN="${CABAL_UNKNOWN} ${feature}";;
esac
done
if [[ -n "${CABAL_USE_HADDOCK}" ]]; then
IUSE="${IUSE} doc"
fi
if [[ -n "${CABAL_USE_HSCOLOUR}" ]]; then
IUSE="${IUSE} hscolour"
DEPEND="${DEPEND} hscolour? ( dev-haskell/hscolour )"
fi
if [[ -n "${CABAL_USE_HOOGLE}" ]]; then
# enabled only in ::haskell
#IUSE="${IUSE} hoogle"
CABAL_USE_HOOGLE=
fi
if [[ -n "${CABAL_USE_PROFILE}" ]]; then
IUSE="${IUSE} profile"
fi
if [[ -n "${CABAL_TEST_SUITE}" ]]; then
IUSE="${IUSE} test"
RESTRICT+=" !test? ( test )"
fi
# returns the version of cabal currently in use.
# Rarely it's handy to pin cabal version from outside.
: ${_CABAL_VERSION_CACHE:=""}
cabal-version() {
if [[ -z "${_CABAL_VERSION_CACHE}" ]]; then
if [[ "${CABAL_BOOTSTRAP}" ]]; then
# We're bootstrapping cabal, so the cabal version is the version
# of this package itself.
_CABAL_VERSION_CACHE="${PV}"
elif [[ "${CABAL_FROM_GHC}" ]]; then
_CABAL_VERSION_CACHE="$(ghc-cabal-version)"
else
# We ask portage, not ghc, so that we only pick up
# portage-installed cabal versions.
_CABAL_VERSION_CACHE="$(ghc-extractportageversion dev-haskell/cabal)"
fi
fi
echo "${_CABAL_VERSION_CACHE}"
}
cabal-bootstrap() {
local setupmodule
local cabalpackage
local setup_bootstrap_args=()
if [[ -f "${S}/Setup.lhs" ]]; then
setupmodule="${S}/Setup.lhs"
elif [[ -f "${S}/Setup.hs" ]]; then
setupmodule="${S}/Setup.hs"
else
eqawarn "No Setup.lhs or Setup.hs found. Either add Setup.hs to package or call cabal-mksetup from ebuild"
cabal-mksetup
setupmodule="${S}/Setup.hs"
fi
# We build the setup program using the latest version of
# cabal that we have installed
cabalpackage=Cabal-$(cabal-version)
einfo "Using cabal-$(cabal-version)."
if $(ghc-supports-threaded-runtime); then
# Cabal has a bug that deadlocks non-threaded RTS:
# https://bugs.gentoo.org/537500
# https://github.com/haskell/cabal/issues/2398
setup_bootstrap_args+=(-threaded)
fi
make_setup() {
set -- -package "${cabalpackage}" --make "${setupmodule}" \
$(ghc-make-args) \
"${setup_bootstrap_args[@]}" \
${HCFLAGS} \
${GHC_BOOTSTRAP_FLAGS} \
"$@" \
-o setup
echo $(ghc-getghc) "$@"
$(ghc-getghc) "$@"
}
if $(ghc-supports-shared-libraries); then
# # some custom build systems might use external libraries,
# # for which we don't have shared libs, so keep static fallback
# bug #411789, http://hackage.haskell.org/trac/ghc/ticket/5743#comment:3
# http://hackage.haskell.org/trac/ghc/ticket/7062
# http://hackage.haskell.org/trac/ghc/ticket/3072
# ghc does not set RPATH for extralibs, thus we do it ourselves by hand
einfo "Prepending $(ghc-libdir) to LD_LIBRARY_PATH"
if [[ ${CHOST} != *-darwin* ]]; then
LD_LIBRARY_PATH="$(ghc-libdir)${LD_LIBRARY_PATH:+:}${LD_LIBRARY_PATH}"
export LD_LIBRARY_PATH
else
DYLD_LIBRARY_PATH="$(ghc-libdir)${DYLD_LIBRARY_PATH:+:}${DYLD_LIBRARY_PATH}"
export DYLD_LIBRARY_PATH
fi
{ make_setup -dynamic "$@" && ./setup --help >/dev/null; } ||
make_setup "$@" || die "compiling ${setupmodule} failed"
else
make_setup "$@" || die "compiling ${setupmodule} failed"
fi
}
cabal-mksetup() {
local setupdir=${1:-${S}}
local setup_src=${setupdir}/Setup.hs
rm -vf "${setupdir}"/Setup.{lhs,hs}
elog "Creating 'Setup.hs' for 'Simple' build type."
echo 'import Distribution.Simple; main = defaultMain' \
> "${setup_src}" || die "failed to create default Setup.hs"
}
haskell-cabal-run_verbose() {
echo "$@"
"$@" || die "failed: $@"
}
cabal-hscolour() {
haskell-cabal-run_verbose ./setup hscolour "$@"
}
cabal-haddock() {
haskell-cabal-run_verbose ./setup haddock "$@"
}
cabal-die-if-nonempty() {
local breakage_type=$1
shift
[[ "${#@}" == 0 ]] && return 0
eerror "Detected ${breakage_type} packages: ${@}"
die "//==-- Please, run 'haskell-updater' to fix ${breakage_type} packages --==//"
}
cabal-show-brokens() {
[[ ${CABAL_REPORT_OTHER_BROKEN_PACKAGES} != yes ]] && return 0
elog "ghc-pkg check: 'checking for other broken packages:'"
# pretty-printer
$(ghc-getghcpkg) check 2>&1 \
| egrep -v '^Warning: haddock-(html|interfaces): ' \
| egrep -v '^Warning: include-dirs: ' \
| head -n 20
cabal-die-if-nonempty 'broken' \
$($(ghc-getghcpkg) check --simple-output)
}
cabal-show-old() {
[[ ${CABAL_REPORT_OTHER_BROKEN_PACKAGES} != yes ]] && return 0
cabal-die-if-nonempty 'outdated' \
$("${EPREFIX}"/usr/sbin/haskell-updater --quiet --upgrade --list-only)
}
cabal-show-brokens-and-die() {
cabal-show-brokens
cabal-show-old
die "$@"
}
cabal-configure() {
local cabalconf=()
has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
if [[ -n "${CABAL_USE_HADDOCK}" ]] && use doc; then
# We use the version bundled with GHC if it exists
# Haddock is very picky about index files
# it generates for ghc's base and other packages.
local p=${EPREFIX}/usr/bin/haddock-ghc-$(ghc-version)
if [[ -f $p ]]; then
cabalconf+=(--with-haddock="${p}")
else
cabalconf+=(--with-haddock=${EPREFIX}/usr/bin/haddock)
fi
fi
if [[ -n "${CABAL_USE_PROFILE}" ]] && use profile; then
cabalconf+=(--enable-library-profiling)
fi
if [[ -n "${CABAL_TEST_SUITE}" ]]; then
cabalconf+=($(use_enable test tests))
fi
if [[ -n "${CABAL_GHC_CONSTRAINT}" ]]; then
cabalconf+=($(cabal-constraint "ghc"))
fi
cabalconf+=(--ghc-options="$(ghc-make-args)")
local option
for option in ${HCFLAGS}
do
cabalconf+=(--ghc-option="$option")
done
# toolchain
cabalconf+=(--with-ar="$(tc-getAR)")
# Building GHCi libs on ppc64 causes "TOC overflow".
if use ppc64; then
cabalconf+=(--disable-library-for-ghci)
fi
# currently cabal does not respect CFLAGS and LDFLAGS on its own (bug #333217)
# so translate LDFLAGS to ghc parameters (with mild filtering).
local flag
for flag in $CFLAGS; do
case "${flag}" in
-flto|-flto=*)
# binutils does not support partial linking yet:
# https://github.com/gentoo-haskell/gentoo-haskell/issues/1110
# https://sourceware.org/PR12291
einfo "Filter '${flag}' out of CFLAGS (avoid lto partial linking)"
continue
;;
esac
cabalconf+=(--ghc-option="-optc$flag")
done
for flag in $LDFLAGS; do
case "${flag}" in
-flto|-flto=*)
# binutils does not support partial linking yet:
# https://github.com/gentoo-haskell/gentoo-haskell/issues/1110
# https://sourceware.org/PR12291
einfo "Filter '${flag}' out of LDFLAGS (avoid lto partial linking)"
continue
;;
esac
cabalconf+=(--ghc-option="-optl$flag")
done
# disable executable stripping, as portage will strip by itself,
# and pre-stripping gives a QA warning.
# cabal versions prior to 1.4 do not strip executables, and do
# not accept the flag.
# this fixes numerous bugs, among them:
# bug #251881, bug #251882, bug #251884, bug #251886, bug #299494
cabalconf+=(--disable-executable-stripping)
cabalconf+=(--docdir="${EPREFIX}"/usr/share/doc/${PF})
# As of Cabal 1.2, configure is quite quiet. For diagnostic purposes
# it's better if the configure chatter is in the build logs:
cabalconf+=(--verbose)
# We build a shared version of our Cabal where ghc ships its shared
# version of it. We will link ./setup as a dynamic binary against Cabal later.
[[ ${CATEGORY}/${PN} == "dev-haskell/cabal" ]] && \
$(ghc-supports-shared-libraries) && \
cabalconf+=(--enable-shared)
if $(ghc-supports-shared-libraries); then
# Experimental support for dynamically linked binaries.
# We are enabling it since 7.10.1_rc3
if ver_test "$(ghc-version)" -ge "7.10.0.20150316"; then
# we didn't enable it before as it was not stable on all arches
cabalconf+=(--enable-shared)
# Known to break on ghc-7.8/Cabal-1.18
# https://ghc.haskell.org/trac/ghc/ticket/9625
cabalconf+=(--enable-executable-dynamic)
fi
fi
# --sysconfdir appeared in Cabal-1.18+
if ./setup configure --help | grep -q -- --sysconfdir; then
cabalconf+=(--sysconfdir="${EPREFIX}"/etc)
fi
# appeared in Cabal-1.18+ (see '--disable-executable-stripping')
if ./setup configure --help | grep -q -- --disable-library-stripping; then
cabalconf+=(--disable-library-stripping)
fi
set -- configure \
--ghc --prefix="${EPREFIX}"/usr \
--with-compiler="$(ghc-getghc)" \
--with-hc-pkg="$(ghc-getghcpkg)" \
--prefix="${EPREFIX}"/usr \
--libdir="${EPREFIX}"/usr/$(get_libdir) \
--libsubdir=${P}/ghc-$(ghc-version) \
--datadir="${EPREFIX}"/usr/share/ \
--datasubdir=${P}/ghc-$(ghc-version) \
"${cabalconf[@]}" \
${CABAL_CONFIGURE_FLAGS} \
"$@" \
${CABAL_EXTRA_CONFIGURE_FLAGS}
echo ./setup "$@"
./setup "$@" || cabal-show-brokens-and-die "setup configure failed"
}
cabal-build() {
set -- build "$@" ${CABAL_EXTRA_BUILD_FLAGS}
echo ./setup "$@"
./setup "$@" \
|| die "setup build failed"
}
cabal-copy() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && ED=${D}
set -- copy --destdir="${D}" "$@"
echo ./setup "$@"
./setup "$@" || die "setup copy failed"
# cabal is a bit eager about creating dirs,
# so remove them if they are empty
rmdir "${ED}/usr/bin" 2> /dev/null
}
cabal-pkg() {
# This does not actually register since we're using true instead
# of ghc-pkg. So it just leaves the .conf file and we can
# register that ourselves (if it exists).
if [[ -n ${CABAL_HAS_LIBRARIES} ]]; then
# Newer cabal can generate a package conf for us:
./setup register --gen-pkg-config="${T}/${P}.conf"
if [[ -d "${T}/${P}.conf" ]]; then
ghc-install-pkg "${T}/${P}.conf"/*
else
ghc-install-pkg "${T}/${P}.conf"
fi
fi
}
# Some cabal libs are bundled along with some versions of ghc
# eg filepath-1.0 comes with ghc-6.6.1
# by putting CABAL_CORE_LIB_GHC_PV="6.6.1" in an ebuild we are declaring that
# when building with this version of ghc, the ebuild is a dummy, that is, it
# will install no files since the package is already included with ghc.
# However portage still records the dependency and we can upgrade the package
# to a later one that's not included with ghc.
# You can also put a space separated list, eg CABAL_CORE_LIB_GHC_PV="6.6 6.6.1".
# Those versions are taken as-is from ghc `--numeric-version`.
# Package manager versions are also supported:
# CABAL_CORE_LIB_GHC_PV="7.10.* PM:7.8.4-r1".
cabal-is-dummy-lib() {
local bin_ghc_version=$(ghc-version)
local pm_ghc_version=$(ghc-pm-version)
for version in ${CABAL_CORE_LIB_GHC_PV}; do
[[ "${bin_ghc_version}" == ${version} ]] && return 0
[[ "${pm_ghc_version}" == ${version} ]] && return 0
done
return 1
}
# exported function: check if cabal is correctly installed for
# the currently active ghc (we cannot guarantee this with portage)
haskell-cabal_pkg_setup() {
if [[ -n ${CABAL_HAS_LIBRARIES} ]]; then
[[ ${RDEPEND} == *dev-lang/ghc* ]] || eqawarn "QA Notice: A library does not have runtime dependency on dev-lang/ghc."
fi
if [[ -n "${CABAL_UNKNOWN}" ]]; then
eqawarn "QA Notice: Unknown entry in CABAL_FEATURES: ${CABAL_UNKNOWN}"
fi
if cabal-is-dummy-lib; then
einfo "${P} is included in ghc-${CABAL_CORE_LIB_GHC_PV}, nothing to install."
fi
}
haskell-cabal_src_configure() {
cabal-is-dummy-lib && return
pushd "${S}" > /dev/null || die
cabal-bootstrap
cabal-configure "$@"
popd > /dev/null || die
}
# exported function: nice alias
cabal_src_configure() {
haskell-cabal_src_configure "$@"
}
# exported function: cabal-style bootstrap configure and compile
cabal_src_compile() {
# it's a common mistake when one bumps ebuild to EAPI="2" (and upper)
# and forgets to separate src_compile() to src_configure()/src_compile().
# Such error leads to default src_configure and we lose all passed flags.
if ! has "${EAPI:-0}" 0 1; then
local passed_flag
for passed_flag in "$@"; do
[[ ${passed_flag} == --flags=* ]] && \
eqawarn "QA Notice: Cabal option '${passed_flag}' has effect only in src_configure()"
done
fi
cabal-is-dummy-lib && return
has src_configure ${HASKELL_CABAL_EXPF} || haskell-cabal_src_configure "$@"
cabal-build
if [[ -n "$CABAL_USE_HADDOCK" ]] && use doc; then
if [[ -n "$CABAL_USE_HSCOLOUR" ]] && use hscolour; then
# --hyperlink-source implies calling 'setup hscolour'
haddock_args+=(--hyperlink-source)
fi
cabal-haddock "${haddock_args[@]}" $CABAL_EXTRA_HADDOCK_FLAGS
if [[ -n "$CABAL_USE_HOOGLE" ]] && use hoogle; then
cabal-haddock --hoogle $CABAL_EXTRA_HOOGLE_FLAGS
fi
if [[ -n "${CABAL_REBUILD_AFTER_DOC_WORKAROUND}" ]]; then
ewarn "rebuild-after-doc-workaround is enabled. This is a"
ewarn "temporary worakround to deal with https://github.com/haskell/cabal/issues/7213"
ewarn "until the upstream issue can be resolved."
cabal-build
fi
else
if [[ -n "$CABAL_USE_HSCOLOUR" ]] && use hscolour; then
cabal-hscolour $CABAL_EXTRA_HSCOLOUR_FLAGS
fi
if [[ -n "$CABAL_USE_HOOGLE" ]] && use hoogle; then
ewarn "hoogle USE flag requires doc USE flag, building without hoogle"
fi
fi
}
haskell-cabal_src_compile() {
pushd "${S}" > /dev/null || die
cabal_src_compile "$@"
popd > /dev/null || die
}
haskell-cabal_src_test() {
local cabaltest=()
pushd "${S}" > /dev/null || die
if cabal-is-dummy-lib; then
einfo ">>> No tests for dummy library: ${CATEGORY}/${PF}"
else
einfo ">>> Test phase [cabal test]: ${CATEGORY}/${PF}"
# '--show-details=streaming' appeared in Cabal-1.20
if ./setup test --help | grep -q -- "'streaming'"; then
cabaltest+=(--show-details=streaming)
fi
set -- test \
"${cabaltest[@]}" \
${CABAL_TEST_FLAGS} \
"$@" \
${CABAL_EXTRA_TEST_FLAGS}
echo ./setup "$@"
./setup "$@" || die "cabal test failed"
fi
popd > /dev/null || die
}
# exported function: cabal-style copy and register
cabal_src_install() {
has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
if ! cabal-is-dummy-lib; then
cabal-copy
cabal-pkg
fi
# create a dummy local package conf file for haskell-updater
# if it does not exist (dummy libraries and binaries w/o libraries)
local ghc_confdir_with_prefix="$(ghc-confdir)"
# remove EPREFIX
dodir ${ghc_confdir_with_prefix#${EPREFIX}}
local hint_db="${D}/$(ghc-confdir)"
local hint_file="${hint_db}/gentoo-empty-${CATEGORY}-${PF}.conf"
mkdir -p "${hint_db}" || die
touch "${hint_file}" || die
}
haskell-cabal_src_install() {
pushd "${S}" > /dev/null || die
cabal_src_install
popd > /dev/null || die
}
haskell-cabal_pkg_postinst() {
ghc-package_pkg_postinst
}
haskell-cabal_pkg_postrm() {
ghc-package_pkg_postrm
}
# @FUNCTION: cabal_flag
# @DESCRIPTION:
# ebuild.sh:use_enable() taken as base
#
# Usage examples:
#
# CABAL_CONFIGURE_FLAGS=$(cabal_flag gui)
# leads to "--flags=gui" or "--flags=-gui" (useflag 'gui')
#
# CABAL_CONFIGURE_FLAGS=$(cabal_flag gtk gui)
# also leads to "--flags=gui" or " --flags=-gui" (useflag 'gtk')
#
cabal_flag() {
if [[ -z "$1" ]]; then
echo "!!! cabal_flag() called without a parameter." >&2
echo "!!! cabal_flag() <USEFLAG> [<cabal_flagname>]" >&2
return 1
fi
local UWORD=${2:-$1}
if use "$1"; then
echo "--flags=${UWORD}"
else
echo "--flags=-${UWORD}"
fi
return 0
}
# @FUNCTION: cabal_chdeps
# @DESCRIPTION:
# Allows easier patching of the dependencies in $CABAL_FILE
# (${S}/${PN}.cabal by default).
#
# Accepts argument list as pairs of substitutions: <from-string> <to-string>...
#
# Dies on error.
#
# Usage examples:
#
# src_prepare() {
# cabal_chdeps \
# 'base >= 4.2 && < 4.6' 'base >= 4.2 && < 4.7' \
# 'containers ==0.4.*' 'containers >= 0.4 && < 0.6'
#}
# or
# src_prepare() {
# CABAL_FILE=${S}/${MY_PN}.cabal cabal_chdeps \
# 'base >= 4.2 && < 4.6' 'base >= 4.2 && < 4.7'
# CABAL_FILE=${S}/${MY_PN}-tools.cabal cabal_chdeps \
# 'base == 3.*' 'base >= 4.2 && < 4.7'
#}
#
cabal_chdeps() {
local cabal_fn=${MY_PN:-${PN}}.cabal
local cf=${CABAL_FILE:-${S}/${cabal_fn}}
local from_ss # ss - substring
local to_ss
local orig_c # c - contents
local new_c
[[ -f $cf ]] || die "cabal file '$cf' does not exist"
orig_c=$(< "$cf")
while :; do
from_pat=$1
to_str=$2
[[ -n ${from_pat} ]] || break
[[ -n ${to_str} ]] || die "'${from_str}' does not have 'to' part"
einfo "CHDEP: '${from_pat}' -> '${to_str}'"
# escape pattern-like symbols
from_pat=${from_pat//\*/\\*}
from_pat=${from_pat//\[/\\[}
new_c=${orig_c//${from_pat}/${to_str}}
if [[ -n $CABAL_DEBUG_LOOSENING ]]; then
echo "${orig_c}" >"${T}/${cf}".pre
echo "${new_c}" >"${T}/${cf}".post
diff -u "${T}/${cf}".{pre,post}
fi
[[ "${orig_c}" == "${new_c}" ]] && die "no trigger for '${from_pat}'"
orig_c=${new_c}
shift
shift
done
echo "${new_c}" > "$cf" ||
die "failed to update"
}
# @FUNCTION: cabal-constraint
# @DESCRIPTION:
# Allows setting constraints on the libraries that are
# used by the specified package.
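#
# A sketch of the intended call site (this mirrors the CABAL_GHC_CONSTRAINT
# handling in cabal-configure above):
# @CODE
# cabalconf+=($(cabal-constraint "ghc"))
# @CODE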
cabal-constraint() {
while read p v ; do
echo "--constraint \"$p == $v\""
done < $(ghc-pkgdeps ${1})
}
# @FUNCTION: replace-hcflags
# @USAGE: <old> <new>
# @DESCRIPTION:
# Replace the <old> flag with <new> in HCFLAGS. Accepts shell globs for <old>.
# The implementation is picked from flag-o-matic.eclass:replace-flags()
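#
# For example (hypothetical; the flags are illustrative and globs such as
# -O* are accepted for <old>):
# @CODE
# replace-hcflags -O2 -O1
# @CODE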
replace-hcflags() {
[[ $# != 2 ]] && die "Usage: replace-hcflags <old flag> <new flag>"
local f new=()
for f in ${HCFLAGS} ; do
# Note this should work with globs like -O*
if [[ ${f} == ${1} ]]; then
einfo "HCFLAGS: replacing '${f}' to '${2}'"
f=${2}
fi
new+=( "${f}" )
done
export HCFLAGS="${new[*]}"
return 0
}

431
eclass/java-ant-2.eclass Normal file
View File

@ -0,0 +1,431 @@
# Copyright 2004-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-ant-2.eclass
# @MAINTAINER:
# java@gentoo.org
# @AUTHOR:
# kiorky (kiorky@cryptelium.net), Petteri Räty (betelgeuse@gentoo.org)
# @BLURB: eclass for ant based Java packages
# @DESCRIPTION:
# Eclass for Ant-based Java packages. Provides support for both automatic and
# manual manipulation of build.xml files. Should be inherited after java-pkg-2
# or java-pkg-opt-2 eclass.
inherit java-utils-2 multilib
# This eclass provides functionality for Java packages which use
# ant to build. In particular, it will attempt to fix build.xml files, so that
# they use the appropriate 'target' and 'source' attributes.
# @ECLASS-VARIABLE: WANT_ANT_TASKS
# @DEFAULT_UNSET
# @DESCRIPTION:
# An $IFS separated list of ant tasks.
# Ebuild can specify this variable before inheriting java-ant-2 eclass to
# determine the ANT_TASKS it needs. They will be automatically translated to
# the DEPEND and ANT_TASKS variables. JAVA_PKG_FORCE_ANT_TASKS can override
# ANT_TASKS set by WANT_ANT_TASKS, but not the DEPEND due to caching.
# Ebuilds that need to depend conditionally on certain tasks and specify them
# differently for different eant calls can't use this simplified approach.
# You also cannot specify a version or anything other than ant-*.
#
# @CODE
# WANT_ANT_TASKS="ant-junit ant-trax"
# @CODE
# The implementation of dependencies is handled by java-utils-2.eclass
# WANT_ANT_TASKS
# @ECLASS-VARIABLE: JAVA_ANT_DISABLE_ANT_CORE_DEP
# @DEFAULT_UNSET
# @DESCRIPTION:
# Setting this variable non-empty before inheriting java-ant-2 disables adding
# dev-java/ant-core into DEPEND.
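#
# A minimal sketch (hypothetical ebuild snippet; as stated above it must be
# set before the inherit to take effect):
# @CODE
# JAVA_ANT_DISABLE_ANT_CORE_DEP="yes"
# inherit java-ant-2
# @CODE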
if [[ -z "${JAVA_ANT_DISABLE_ANT_CORE_DEP}" ]]; then
JAVA_ANT_E_DEPEND+=" >=dev-java/ant-core-1.8.2"
[[ "${EAPI:-0}" != 0 ]] && JAVA_ANT_E_DEPEND+=":0"
fi
# add ant tasks specified in WANT_ANT_TASKS to DEPEND
local ANT_TASKS_DEPEND;
ANT_TASKS_DEPEND="$(java-pkg_ant-tasks-depend)"
# check that java-pkg_ant-tasks-depend didn't fail
if [[ $? != 0 ]]; then
eerror "${ANT_TASKS_DEPEND}"
die "java-pkg_ant-tasks-depend() failed"
fi
# We need some tools from javatoolkit. We also need ant dependencies
# constructed above.
JAVA_ANT_E_DEPEND="${JAVA_ANT_E_DEPEND}
${ANT_TASKS_DEPEND}
>=dev-java/javatoolkit-0.3.0-r2"
# this eclass must be inherited after java-pkg-2 or java-pkg-opt-2
# if it's java-pkg-opt-2, ant dependencies are pulled based on USE flag
if has java-pkg-opt-2 ${INHERITED}; then
JAVA_ANT_E_DEPEND="${JAVA_PKG_OPT_USE}? ( ${JAVA_ANT_E_DEPEND} )"
elif ! has java-pkg-2 ${INHERITED}; then
eerror "java-ant-2 eclass can only be inherited AFTER java-pkg-2 or java-pkg-opt-2"
fi
DEPEND="${JAVA_ANT_E_DEPEND}"
# @ECLASS-VARIABLE: JAVA_PKG_BSFIX
# @DESCRIPTION:
# Should we attempt to 'fix' ant build files to include the source/target
# attributes when calling javac?
JAVA_PKG_BSFIX=${JAVA_PKG_BSFIX:-"on"}
# @ECLASS-VARIABLE: JAVA_PKG_BSFIX_ALL
# @DESCRIPTION:
# If we're fixing build files, should we try to fix all the ones we can find?
JAVA_PKG_BSFIX_ALL=${JAVA_PKG_BSFIX_ALL:-"yes"}
# @ECLASS-VARIABLE: JAVA_PKG_BSFIX_NAME
# @DESCRIPTION:
# Filename of build files to fix/search for
JAVA_PKG_BSFIX_NAME=${JAVA_PKG_BSFIX_NAME:-"build.xml"}
# @ECLASS-VARIABLE: JAVA_PKG_BSFIX_TARGET_TAGS
# @DESCRIPTION:
# Targets to fix the 'source' attribute in
JAVA_PKG_BSFIX_TARGET_TAGS=${JAVA_PKG_BSFIX_TARGET_TAGS:-"javac xjavac javac.preset"}
# @ECLASS-VARIABLE: JAVA_PKG_BSFIX_SOURCE_TAGS
# @DESCRIPTION:
# Targets to fix the 'target' attribute in
JAVA_PKG_BSFIX_SOURCE_TAGS=${JAVA_PKG_BSFIX_SOURCE_TAGS:-"javadoc javac xjavac javac.preset"}
# @ECLASS-VARIABLE: JAVA_ANT_CLASSPATH_TAGS
# @DESCRIPTION:
# Targets to add the classpath attribute to
JAVA_ANT_CLASSPATH_TAGS="javac xjavac"
# @ECLASS-VARIABLE: JAVA_ANT_IGNORE_SYSTEM_CLASSES
# @DEFAULT_UNSET
# @DESCRIPTION:
# When set, <available> Ant tasks are rewritten to ignore Ant's runtime classpath.
case "${EAPI:-0}" in
0|1) : ;;
*) EXPORT_FUNCTIONS src_configure ;;
esac
# @FUNCTION: java-ant-2_src_configure
# @DESCRIPTION:
# src_configure rewrites the build.xml files automatically, unless EAPI is undefined, 0 or 1.
java-ant-2_src_configure() {
# if java support is optional, don't perform this when the USE flag is off
if has java-pkg-opt-2 ${INHERITED}; then
use ${JAVA_PKG_OPT_USE} || return
fi
# eant will call us unless called by Portage
[[ -e "${T}/java-ant-2_src_configure-run" ]] && return
[[ "${JAVA_ANT_IGNORE_SYSTEM_CLASSES}" ]] \
&& java-ant_ignore-system-classes "${S}/build.xml"
java-ant_bsfix
touch "${T}/java-ant-2_src_configure-run"
}
# @FUNCTION: java-ant_bsfix
# @INTERNAL
# @DESCRIPTION:
# Attempts to fix build files.
#
# @CODE
# Affected by variables:
# JAVA_PKG_BSFIX
# JAVA_PKG_BSFIX_ALL
# JAVA_PKG_BSFIX_NAME
# @CODE
java-ant_bsfix() {
debug-print-function ${FUNCNAME} $*
[[ "${JAVA_PKG_BSFIX}" != "on" ]] && return
if ! java-pkg_needs-vm; then
echo "QA Notice: Package is using java-ant, but doesn't depend on a Java VM"
fi
pushd "${S}" >/dev/null || die
local find_args=""
[[ "${JAVA_PKG_BSFIX_ALL}" == "yes" ]] || find_args="-maxdepth 1"
find_args="${find_args} -type f ( -name ${JAVA_PKG_BSFIX_NAME// / -o -name } )"
local bsfix_these=() line
while IFS= read -r -d $'\0' line; do
bsfix_these+=( "${line}" )
done < <(find . ${find_args} -print0)
[[ "${bsfix_these[@]}" ]] && java-ant_bsfix_files "${bsfix_these[@]}"
popd > /dev/null || die
}
# @FUNCTION: java-ant_bsfix_files
# @USAGE: <path/to/first/build.xml> [path/to/second.build.xml ...]
# @DESCRIPTION:
# Attempts to fix named build files.
#
# @CODE
# Affected by variables:
# JAVA_PKG_BSFIX_SOURCE_TAGS
# JAVA_PKG_BSFIX_TARGET_TAGS
# JAVA_ANT_REWRITE_CLASSPATH
# JAVA_ANT_JAVADOC_INPUT_DIRS: Where we can find java sources for javadoc
# input. Can be a space separated list of
# directories
# JAVA_ANT_BSFIX_EXTRA_ARGS: You can use this to pass extra variables to the
# rewriter if you know what you are doing.
# @CODE
#
# If JAVA_ANT_JAVADOC_INPUT_DIRS is set, we will turn on the addition of a basic
# javadoc target to the ant build.xml with the javadoc xml-rewriter feature.
# Then we will set EANT_DOC_TARGET to the added javadoc target.
# NOTE: the variable JAVA_ANT_JAVADOC_OUTPUT_DIR points where we will
# generate the javadocs. This is a read-only variable, don't change it.
# When changing this function, make sure that it works with paths with spaces in
# them.
java-ant_bsfix_files() {
debug-print-function ${FUNCNAME} $*
[[ ${#} = 0 ]] && die "${FUNCNAME} called without arguments"
local want_source="$(java-pkg_get-source)"
local want_target="$(java-pkg_get-target)"
debug-print "${FUNCNAME}: target: ${want_target} source: ${want_source}"
if [ -z "${want_source}" -o -z "${want_target}" ]; then
eerror "Could not find valid -source/-target values"
eerror "Please file a bug about this on bugs.gentoo.org"
die "Could not find valid -source/-target values"
else
local files=()
for file in "${@}"; do
debug-print "${FUNCNAME}: ${file}"
if [[ -n "${JAVA_PKG_DEBUG}" ]]; then
cp "${file}" "${file}.orig" || die "failed to copy ${file}"
fi
if [[ ! -w "${file}" ]]; then
chmod u+w "${file}" || die "chmod u+w ${file} failed"
fi
files+=( -f "${file}" )
done
if [ -e "${EPREFIX}/usr/libexec/javatoolkit" ]; then
local rewriter3="${EPREFIX}/usr/libexec/javatoolkit/xml-rewrite-3.py"
local rewriter4="${EPREFIX}/usr/libexec/javatoolkit/build-xml-rewrite"
else
local rewriter3="${EPREFIX}/usr/$(get_libdir)/javatoolkit/bin/xml-rewrite-3.py"
local rewriter4="${EPREFIX}/usr/$(get_libdir)/javatoolkit/bin/build-xml-rewrite"
fi
if [[ -x ${rewriter4} && ${JAVA_ANT_ENCODING} ]]; then
[[ ${JAVA_ANT_REWRITE_CLASSPATH} ]] && local gcp="-g"
[[ ${JAVA_ANT_ENCODING} ]] && local enc="-e ${JAVA_ANT_ENCODING}"
echo "cElementTree rewriter"
debug-print "${rewriter4} extra args: ${gcp} ${enc}"
${rewriter4} ${gcp} ${enc} \
-c "${JAVA_PKG_BSFIX_SOURCE_TAGS}" source ${want_source} \
-c "${JAVA_PKG_BSFIX_TARGET_TAGS}" target ${want_target} \
"${@}" || die "build-xml-rewrite failed"
else
debug-print "Using third generation rewriter"
echo "Rewriting attributes"
local bsfix_extra_args=()
# WARNING KEEP THE ORDER, ESPECIALLY FOR CHANGED ATTRIBUTES!
if [[ -n ${JAVA_ANT_REWRITE_CLASSPATH} ]]; then
local cp_tags="${JAVA_ANT_CLASSPATH_TAGS// / -e }"
bsfix_extra_args+=( -g -e ${cp_tags} )
bsfix_extra_args+=( -a classpath -v '${gentoo.classpath}' )
fi
if [[ -n ${JAVA_ANT_JAVADOC_INPUT_DIRS} ]]; then
if [[ -n ${JAVA_ANT_JAVADOC_OUTPUT_DIR} ]]; then
die "Do not define JAVA_ANT_JAVADOC_OUTPUT_DIR!"
fi
# Where will our generated javadoc go.
readonly JAVA_ANT_JAVADOC_OUTPUT_DIR="${WORKDIR}/gentoo_javadoc"
mkdir -p "${JAVA_ANT_JAVADOC_OUTPUT_DIR}" || die
if has doc ${IUSE}; then
if use doc; then
if [[ -z ${EANT_DOC_TARGET} ]]; then
EANT_DOC_TARGET="gentoojavadoc"
else
die "You can't use javadoc adding and set EANT_DOC_TARGET too."
fi
for dir in ${JAVA_ANT_JAVADOC_INPUT_DIRS};do
if [[ ! -d ${dir} ]]; then
eerror "This dir: ${dir} doesnt' exists"
die "You must specify directories for javadoc input/output dirs."
fi
done
bsfix_extra_args+=( --javadoc --source-directory )
# filter third/double spaces
JAVA_ANT_JAVADOC_INPUT_DIRS=${JAVA_ANT_JAVADOC_INPUT_DIRS// /}
JAVA_ANT_JAVADOC_INPUT_DIRS=${JAVA_ANT_JAVADOC_INPUT_DIRS// /}
bsfix_extra_args+=( ${JAVA_ANT_JAVADOC_INPUT_DIRS// / --source-directory } )
bsfix_extra_args+=( --output-directory "${JAVA_ANT_JAVADOC_OUTPUT_DIR}" )
fi
else
die "You need to have doc in IUSE when using JAVA_ANT_JAVADOC_INPUT_DIRS"
fi
fi
[[ -n ${JAVA_ANT_BSFIX_EXTRA_ARGS} ]] \
&& bsfix_extra_args+=( ${JAVA_ANT_BSFIX_EXTRA_ARGS} )
debug-print "bsfix_extra_args: ${bsfix_extra_args[*]}"
${rewriter3} "${files[@]}" \
-c --source-element ${JAVA_PKG_BSFIX_SOURCE_TAGS// / --source-element } \
--source-attribute source --source-value ${want_source} \
--target-element ${JAVA_PKG_BSFIX_TARGET_TAGS// / --target-element } \
--target-attribute target --target-value ${want_target} \
--target-attribute nowarn --target-value yes \
"${bsfix_extra_args[@]}" \
|| die "xml-rewrite-3 failed: ${file}"
fi
if [[ -n "${JAVA_PKG_DEBUG}" ]]; then
for file in "${@}"; do
diff -NurbB "${file}.orig" "${file}"
done
fi
fi
return 0 # so that the 1 for diff doesn't get reported
}
# @FUNCTION: java-ant_bsfix_one
# @USAGE: <path/to/build.xml>
# @DESCRIPTION:
# Attempts to fix named build file.
#
# @CODE
# Affected by variables:
# JAVA_PKG_BSFIX_SOURCE_TAGS
# JAVA_PKG_BSFIX_TARGET_TAGS
# @CODE
java-ant_bsfix_one() {
debug-print-function ${FUNCNAME} $*
if [ -z "${1}" ]; then
eerror "${FUNCNAME} needs one argument"
die "${FUNCNAME} needs one argument"
fi
java-ant_bsfix_files "${1}"
}
# @FUNCTION: java-ant_rewrite-classpath
# @USAGE: [path/to/build.xml]
# @DESCRIPTION:
# Adds 'classpath="${gentoo.classpath}"' to specified build file.
#
# Affected by:
# JAVA_ANT_CLASSPATH_TAGS
#
# Parameter defaults to build.xml when not specified
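#
# A possible call (hypothetical; the default file name comes from the
# parameter description above):
# @CODE
# java-ant_rewrite-classpath build.xml
# @CODE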
java-ant_rewrite-classpath() {
debug-print-function ${FUNCNAME} $*
local file="${1}"
[[ -z "${1}" ]] && file=build.xml
[[ ${#} -gt 1 ]] && die "${FUNCNAME} currently can only rewrite one file."
echo "Adding gentoo.classpath to ${file}"
debug-print "java-ant_rewrite-classpath: ${file}"
cp "${file}" "${file}.orig" || die "failed to copy ${file}"
chmod u+w "${file}"
java-ant_xml-rewrite -f "${file}" --change \
-e ${JAVA_ANT_CLASSPATH_TAGS// / -e } -a classpath -v '${gentoo.classpath}'
if [[ -n "${JAVA_PKG_DEBUG}" ]]; then
diff -NurbB "${file}.orig" "${file}"
fi
}
# @FUNCTION: java-ant_ignore-system-classes
# @USAGE: [path/to/build.xml]
# @DESCRIPTION:
# Makes the available task ignore classes in the system classpath
# Parameter defaults to build.xml when not specified
java-ant_ignore-system-classes() {
debug-print-function ${FUNCNAME} $*
local file=${1:-build.xml}
echo "Changing ignoresystemclasses to true for available tasks in ${file}"
java-ant_xml-rewrite -f "${file}" --change \
-e available -a ignoresystemclasses -v "true"
}
# @FUNCTION: java-ant_xml-rewrite
# @USAGE: <xml rewriter arguments>
# @DESCRIPTION:
# Run the right xml-rewrite binary with the given arguments
java-ant_xml-rewrite() {
local gen2_1="${EPREFIX}/usr/$(get_libdir)/javatoolkit/bin/xml-rewrite-2.py"
local gen2_2="${EPREFIX}/usr/libexec/javatoolkit/xml-rewrite-2.py"
# gen1 is deprecated
if [[ -x "${gen2_2}" ]]; then
${gen2_2} "${@}" || die "${gen2_2} failed"
elif [[ -x "${gen2_1}" ]]; then
${gen2_1} "${@}" || die "${gen2_1} failed"
else
eerror "No binary for rewriting found."
eerror "Do you have dev-java/javatoolkit installed?"
die "xml-rewrite not found"
fi
}
# @FUNCTION: java-ant_rewrite-bootclasspath
# @USAGE: <version> [path/to/build.xml] [prepend] [append]
# @DESCRIPTION:
# Adds a bootclasspath to javac-like tasks in build.xml, filled with the jars
# of a bootclasspath package of the given version.
#
# @CODE
# Affected by:
# JAVA_PKG_BSFIX_TARGET_TAGS - the tags of javac tasks
#
# Parameters:
# $1 - the version of bootclasspath (e.g. 1.5), 'auto' for bootclasspath
# of the current JDK
# $2 - path to desired build.xml file, defaults to 'build.xml'
# $3 - (optional) what to prepend the bootclasspath with (to override)
# $4 - (optional) what to append to the bootclasspath
# @CODE
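#
# A possible invocation (hypothetical; the version and file name follow the
# parameter description above):
# @CODE
# java-ant_rewrite-bootclasspath 1.5 build.xml
# @CODE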
java-ant_rewrite-bootclasspath() {
local version="${1}"
local file="${2-build.xml}"
local extra_before="${3}"
local extra_after="${4}"
local bcp="$(java-pkg_get-bootclasspath "${version}")"
if [[ -n "${extra_before}" ]]; then
bcp="${extra_before}:${bcp}"
fi
if [[ -n "${extra_after}" ]]; then
bcp="${bcp}:${extra_after}"
fi
java-ant_xml-rewrite -f "${file}" -c -e ${JAVA_PKG_BSFIX_TARGET_TAGS// / -e } \
-a bootclasspath -v "${bcp}"
}

277
eclass/java-osgi.eclass Normal file
View File

@ -0,0 +1,277 @@
# Copyright 2007-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-osgi.eclass
# @MAINTAINER:
# java@gentoo.org
# @AUTHOR:
# Java maintainers (java@gentoo.org)
# @BLURB: Java OSGi eclass
# @DESCRIPTION:
# This eclass provides functionality which is used by packages that need to be
# OSGi compliant. This means that the generated jars will have special headers
# in their manifests. Currently this is used only by Eclipse-3.3 - later we
# could extend this so that the Gentoo Java system would be fully OSGi compliant.
inherit java-utils-2
# @ECLASS-VARIABLE: _OSGI_T
# @INTERNAL
# @DESCRIPTION:
# We define _OSGI_T so that it does not contain a slash at the end.
# According to Paludis guys, there is currently a proposal for EAPIs that
# would require all variables to end with a slash.
_OSGI_T="${T/%\//}"
# must get Diego to commit something like this to portability.eclass
_canonicalise() {
if type -p realpath > /dev/null; then
realpath "${@}"
elif type -p readlink > /dev/null; then
readlink -f "${@}"
else
# can't die, subshell
eerror "No readlink nor realpath found, cannot canonicalise"
fi
}
# @FUNCTION: _java-osgi_plugin
# @USAGE: <plugin name>
# @INTERNAL
# @DESCRIPTION:
# This is an internal function, not to be called directly.
#
# @CODE
# _java-osgi_plugin "JSch"
# @CODE
#
# @param $1 - bundle name
_java-osgi_plugin() {
# We hardcode Gentoo as the vendor name
cat > "${_OSGI_T}/tmp_jar/plugin.properties" <<-EOF
bundleName="${1}"
vendorName="Gentoo"
EOF
}
# @FUNCTION: _java-osgi_makejar
# @USAGE: <jar name> <symbolic name> <bundle name> <header name>
# @INTERNAL
# @DESCRIPTION:
# This is an internal function, not to be called directly.
#
# @CODE
# _java-osgi_makejar "dist/${PN}.jar" "com.jcraft.jsch" "JSch" "com.jcraft.jsch, com.jcraft.jsch.jce;x-internal:=true"
# @CODE
#
# @param $1 - name of jar to repackage with OSGi
# @param $2 - bundle symbolic name
# @param $3 - bundle name
# @param $4 - export-package header
_java-osgi_makejar() {
debug-print-function ${FUNCNAME} "$@"
(( ${#} < 4 )) && die "Four arguments are needed for _java-osgi_makejar()"
local absoluteJarPath="$(_canonicalise ${1})"
local jarName="$(basename ${1})"
mkdir "${_OSGI_T}/tmp_jar" || die "Unable to create directory ${_OSGI_T}/tmp_jar"
[[ -d "${_OSGI_T}/osgi" ]] || mkdir "${_OSGI_T}/osgi" || die "Unable to create directory ${_OSGI_T}/osgi"
cd "${_OSGI_T}/tmp_jar" && jar xf "${absoluteJarPath}" && cd - > /dev/null \
|| die "Unable to uncompress correctly the original jar"
cat > "${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF" <<-EOF
Manifest-Version: 1.0
Bundle-ManifestVersion: 2
Bundle-Name: %bundleName
Bundle-Vendor: %vendorName
Bundle-Localization: plugin
Bundle-SymbolicName: ${2}
Bundle-Version: ${PV}
Export-Package: ${4}
EOF
_java-osgi_plugin "${3}"
jar cfm "${_OSGI_T}/osgi/${jarName}" "${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF" \
-C "${_OSGI_T}/tmp_jar/" . > /dev/null || die "Unable to recreate the OSGi compliant jar"
rm -rf "${_OSGI_T}/tmp_jar"
}
# @FUNCTION: java-osgi_dojar
# @USAGE: <jar name> <symbolic name> <bundle name> <header name>
# @DESCRIPTION:
# Rewrites a jar and produces an OSGi compliant jar from arguments given on the command line.
# The arguments given correspond to the minimal set of headers
# that must be present on a Manifest file of an OSGi package.
# If you need more headers, you should use the *-fromfile functions below,
# that create the Manifest from a file.
# It will call java-pkg_dojar at the end.
#
# @CODE
# java-osgi_dojar "dist/${PN}.jar" "com.jcraft.jsch" "JSch" "com.jcraft.jsch, com.jcraft.jsch.jce;x-internal:=true"
# @CODE
#
# @param $1 - name of jar to repackage with OSGi
# @param $2 - bundle symbolic name
# @param $3 - bundle name
# @param $4 - export-package-header
java-osgi_dojar() {
debug-print-function ${FUNCNAME} "$@"
local jarName="$(basename ${1})"
_java-osgi_makejar "$@"
java-pkg_dojar "${_OSGI_T}/osgi/${jarName}"
}
# @FUNCTION: java-osgi_newjar
# @USAGE: <jar name> <symbolic name> <bundle name> <header name>
# @DESCRIPTION:
# Rewrites a jar and produces an OSGi compliant jar.
# The arguments given correspond to the minimal set of headers
# that must be present on a Manifest file of an OSGi package.
# If you need more headers, you should use the *-fromfile functions below,
# that create the Manifest from a file.
# It will call java-pkg_newjar at the end.
#
# @CODE
# java-osgi_newjar "dist/${PN}.jar" "com.jcraft.jsch" "JSch" "com.jcraft.jsch, com.jcraft.jsch.jce;x-internal:=true"
# @CODE
#
# @param $1 - name of jar to repackage with OSGi
# @param $2 (optional) - name of the target jar. It will default to package name if not specified.
# @param $3 - bundle symbolic name
# @param $4 - bundle name
# @param $5 - export-package header
java-osgi_newjar() {
debug-print-function ${FUNCNAME} "$@"
local jarName="$(basename $1)"
if (( ${#} > 4 )); then
_java-osgi_makejar "${1}" "${3}" "${4}" "${5}"
java-pkg_newjar "${_OSGI_T}/osgi/${jarName}" "${2}"
else
_java-osgi_makejar "$@"
java-pkg_newjar "${_OSGI_T}/osgi/${jarName}"
fi
}
# @FUNCTION: _java-osgi_makejar-fromfile
# @USAGE: <jar to repackage with OSGi> <Manifest file> <bundle name> <version rewriting>
# @INTERNAL
# @DESCRIPTION:
# This is an internal function, not to be called directly.
#
# @CODE
# _java-osgi_makejar-fromfile "dist/${PN}.jar" "${FILESDIR}/MANIFEST.MF" "JSch" 1
# @CODE
#
# @param $1 - name of jar to repackage with OSGi
# @param $2 - path to the Manifest file
# @param $3 - bundle name
# @param $4 - automatic version rewriting (0 or 1)
_java-osgi_makejar-fromfile() {
debug-print-function ${FUNCNAME} "$@"
((${#} < 4)) && die "Four arguments are needed for _java-osgi_makejar-fromfile()"
local absoluteJarPath="$(_canonicalise ${1})"
local jarName="$(basename ${1})"
mkdir "${_OSGI_T}/tmp_jar" || die "Unable to create directory ${_OSGI_T}/tmp_jar"
[[ -d "${_OSGI_T}/osgi" ]] || mkdir "${_OSGI_T}/osgi" || die "Unable to create directory ${_OSGI_T}/osgi"
cd "${_OSGI_T}/tmp_jar" && jar xf "${absoluteJarPath}" && cd - > /dev/null \
|| die "Unable to uncompress correctly the original jar"
[[ -e "${2}" ]] || die "Manifest file ${2} not found"
# We automatically change the version if automatic version rewriting is on
if (( ${4} )); then
cat "${2}" | sed "s/Bundle-Version:.*/Bundle-Version: ${PV}/" > \
"${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF"
else
cat "${2}" > "${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF"
fi
_java-osgi_plugin "${3}"
jar cfm "${_OSGI_T}/osgi/${jarName}" "${_OSGI_T}/tmp_jar/META-INF/MANIFEST.MF" \
-C "${_OSGI_T}/tmp_jar/" . > /dev/null || die "Unable to recreate the OSGi compliant jar"
rm -rf "${_OSGI_T}/tmp_jar"
}
# @FUNCTION: java-osgi_newjar-fromfile
# @USAGE: <jar to repackage with OSGi> <Manifest file> <bundle name> <version rewriting>
# @DESCRIPTION:
# This function produces an OSGi compliant jar from a given manifest file.
# The Manifest Bundle-Version header will be replaced by the current version
# of the package, unless the --no-auto-version option is given.
# It will call java-pkg_newjar at the end.
#
# @CODE
# java-osgi_newjar-fromfile "dist/${PN}.jar" "${FILESDIR}/MANIFEST.MF" "Standard Widget Toolkit for GTK 2.0"
# @CODE
#
# @param $opt
# --no-auto-version - This option disables automatic rewriting of the
# version in the Manifest file
#
# @param $1 - name of jar to repackage with OSGi
# @param $2 (optional) - name of the target jar. It will default to package name if not specified.
# @param $3 - path to the Manifest file
# @param $4 - bundle name
java-osgi_newjar-fromfile() {
debug-print-function ${FUNCNAME} "$@"
local versionRewriting=1
if [[ "${1}" == "--no-auto-version" ]]; then
versionRewriting=0
shift
fi
local jarName="$(basename ${1})"
if (( ${#} > 3 )); then
_java-osgi_makejar-fromfile "${1}" "${3}" "${4}" "${versionRewriting}"
java-pkg_newjar "${_OSGI_T}/osgi/${jarName}" "${2}"
else
_java-osgi_makejar-fromfile "$@" "${versionRewriting}"
java-pkg_newjar "${_OSGI_T}/osgi/${jarName}"
fi
}
# @FUNCTION: java-osgi_dojar-fromfile
# @USAGE: <jar to repackage with OSGi> <Manifest file> <bundle name>
# @DESCRIPTION:
# This function produces an OSGi compliant jar from a given manifest file.
# The Manifest Bundle-Version header will be replaced by the current version
# of the package, unless the --no-auto-version option is given.
# It will call java-pkg_dojar at the end.
#
# @CODE
# java-osgi_dojar-fromfile "dist/${PN}.jar" "${FILESDIR}/MANIFEST.MF" "Standard Widget Toolkit for GTK 2.0"
# @CODE
#
# @param $opt
# --no-auto-version - This option disables automatic rewriting of the
# version in the Manifest file
#
# @param $1 - name of jar to repackage with OSGi
# @param $2 - path to the Manifest file
# @param $3 - bundle name
java-osgi_dojar-fromfile() {
debug-print-function ${FUNCNAME} "$@"
local versionRewriting=1
if [[ "${1}" == "--no-auto-version" ]]; then
versionRewriting=0
shift
fi
local jarName="$(basename ${1})"
_java-osgi_makejar-fromfile "$@" "${versionRewriting}"
java-pkg_dojar "${_OSGI_T}/osgi/${jarName}"
}

155
eclass/java-pkg-2.eclass Normal file
View File

@ -0,0 +1,155 @@
# Copyright 2004-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-pkg-2.eclass
# @MAINTAINER:
# java@gentoo.org
# @AUTHOR:
# Thomas Matthijs <axxo@gentoo.org>
# @BLURB: Eclass for Java Packages
# @DESCRIPTION:
# This eclass should be inherited for pure Java packages, or by packages which
# need to use Java.
inherit java-utils-2
# @ECLASS-VARIABLE: JAVA_PKG_IUSE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Use JAVA_PKG_IUSE instead of IUSE for doc, source and examples so that
# the eclass can automatically add the needed dependencies for the java-pkg_do*
# functions.
IUSE="${JAVA_PKG_IUSE}"
# Java packages need java-config, and a fairly new release of Portage.
# JAVA_PKG_E_DEPEND is defined in java-utils.eclass.
DEPEND="${JAVA_PKG_E_DEPEND}"
# Nothing special for RDEPEND... just the same as DEPEND.
RDEPEND="${DEPEND}"
# Commons packages follow the same rules so do it here
if [[ ${CATEGORY} = dev-java && ${PN} = commons-* ]]; then
HOMEPAGE="http://commons.apache.org/${PN#commons-}/"
SRC_URI="mirror://apache/${PN/-///}/source/${P}-src.tar.gz"
fi
case "${EAPI:-0}" in
0|1) EXPORT_FUNCTIONS pkg_setup src_compile pkg_preinst ;;
*) EXPORT_FUNCTIONS pkg_setup src_prepare src_compile pkg_preinst ;;
esac
# @FUNCTION: java-pkg-2_pkg_setup
# @DESCRIPTION:
# pkg_setup initializes the Java environment
java-pkg-2_pkg_setup() {
java-pkg_init
}
# @FUNCTION: java-pkg-2_src_prepare
# @DESCRIPTION:
# wrapper for java-utils-2_src_prepare
java-pkg-2_src_prepare() {
java-utils-2_src_prepare
}
# @FUNCTION: java-pkg-2_src_compile
# @DESCRIPTION:
# Default src_compile for java packages
#
# @CODE
# Variables:
# EANT_BUILD_XML - controls the location of the build.xml (default: ./build.xml)
# EANT_FILTER_COMPILER - Calls java-pkg_filter-compiler with the value
# EANT_BUILD_TARGET - the ant target/targets to execute (default: jar)
# EANT_DOC_TARGET - the target to build extra docs under the doc use flag
# (default: javadoc; declare empty to disable completely)
# EANT_GENTOO_CLASSPATH - @see eant documentation in java-utils-2.eclass
# EANT_EXTRA_ARGS - extra arguments to pass to eant
# EANT_ANT_TASKS - modifies the ANT_TASKS variable in the eant environment
# @CODE
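#
# A hedged ebuild-side illustration (hypothetical values; the variable names
# come from the list above):
# @CODE
# EANT_BUILD_TARGET="dist"
# EANT_EXTRA_ARGS="-Dlibdir=lib"
# @CODE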
java-pkg-2_src_compile() {
if [[ -e "${EANT_BUILD_XML:=build.xml}" ]]; then
# auto generate classpath
java-pkg_gen-cp EANT_GENTOO_CLASSPATH
[[ "${EANT_FILTER_COMPILER}" ]] && \
java-pkg_filter-compiler ${EANT_FILTER_COMPILER}
local antflags="${EANT_BUILD_TARGET:=jar}"
if has doc ${IUSE} && [[ -n "${EANT_DOC_TARGET=javadoc}" ]]; then
antflags="${antflags} $(use_doc ${EANT_DOC_TARGET})"
fi
local tasks
[[ ${EANT_ANT_TASKS} ]] && tasks="${ANT_TASKS} ${EANT_ANT_TASKS}"
ANT_TASKS="${tasks:-${ANT_TASKS}}" \
eant ${antflags} -f "${EANT_BUILD_XML}" ${EANT_EXTRA_ARGS} "${@}"
else
echo "${FUNCNAME}: ${EANT_BUILD_XML} not found so nothing to do."
fi
}
# @FUNCTION: java-pkg-2_src_test
# @DESCRIPTION:
# src_test, not exported.
java-pkg-2_src_test() {
[[ -e "${EANT_BUILD_XML:=build.xml}" ]] || return
if [[ ${EANT_TEST_TARGET} ]] || < "${EANT_BUILD_XML}" tr -d "\n" | grep -Eq "<target\b[^>]*\bname=[\"']test[\"']"; then
local opts task_re junit_re pkg
if [[ ${EANT_TEST_JUNIT_INTO} ]]; then
java-pkg_jar-from --into "${EANT_TEST_JUNIT_INTO}" junit
fi
if [[ ${EANT_TEST_GENTOO_CLASSPATH} ]]; then
EANT_GENTOO_CLASSPATH="${EANT_TEST_GENTOO_CLASSPATH}"
fi
ANT_TASKS=${EANT_TEST_ANT_TASKS:-${ANT_TASKS:-${EANT_ANT_TASKS}}}
task_re="\bdev-java/ant-junit(4)?(-[^:]+)?(:\S+)\b"
junit_re="\bdev-java/junit(-[^:]+)?(:\S+)\b"
if [[ ${DEPEND} =~ ${task_re} ]]; then
pkg="ant-junit${BASH_REMATCH[1]}${BASH_REMATCH[3]}"
pkg="${pkg%:0}"
if [[ ${ANT_TASKS} && "${ANT_TASKS}" != none ]]; then
ANT_TASKS="${ANT_TASKS} ${pkg}"
else
ANT_TASKS="${pkg}"
fi
elif [[ ${DEPEND} =~ ${junit_re} ]]; then
pkg="junit${BASH_REMATCH[2]}"
pkg="${pkg%:0}"
opts="-Djunit.jar=\"$(java-pkg_getjar ${pkg} junit.jar)\""
if [[ ${EANT_GENTOO_CLASSPATH} ]]; then
EANT_GENTOO_CLASSPATH+=",${pkg}"
else
EANT_GENTOO_CLASSPATH="${pkg}"
fi
fi
eant ${opts} -f "${EANT_BUILD_XML}" \
${EANT_EXTRA_ARGS} ${EANT_TEST_EXTRA_ARGS} ${EANT_TEST_TARGET:-test}
else
echo "${FUNCNAME}: No test target in ${EANT_BUILD_XML}"
fi
}
# @FUNCTION: java-pkg-2_pkg_preinst
# @DESCRIPTION:
# wrapper for java-utils-2_pkg_preinst
java-pkg-2_pkg_preinst() {
java-utils-2_pkg_preinst
}

60
eclass/java-pkg-opt-2.eclass Normal file
View File

@ -0,0 +1,60 @@
# Copyright 2004-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-pkg-opt-2.eclass
# @MAINTAINER:
# java@gentoo.org
# @AUTHOR:
# Thomas Matthijs <axxo@gentoo.org>
# @BLURB: Eclass for package with optional Java support
# @DESCRIPTION:
# Inherit this eclass instead of java-pkg-2 if you only need optional Java
# support.
inherit java-utils-2
# @ECLASS-VARIABLE: JAVA_PKG_OPT_USE
# @DESCRIPTION:
# USE flag to control if optional Java support is built. Defaults to 'java'.
JAVA_PKG_OPT_USE=${JAVA_PKG_OPT_USE:-java}
DEPEND="${JAVA_PKG_OPT_USE}? ( ${JAVA_PKG_E_DEPEND} )"
RDEPEND="${DEPEND}"
# See java-pkg-2.eclass for JAVA_PKG_IUSE documentation
IUSE="${JAVA_PKG_IUSE} ${JAVA_PKG_OPT_USE}"
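# A hedged usage sketch (hypothetical ebuild, names are illustrative only):
# a package with optional Java support sets the USE flag before the inherit
# and guards every Java step behind it:
#
# @CODE
# JAVA_PKG_OPT_USE="java"	# optional, 'java' is already the default
# inherit java-pkg-opt-2
#
# src_install() {
#	default
#	use java && java-pkg_dojar "${PN}.jar"
# }
# @CODE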
case "${EAPI:-0}" in
0|1) EXPORT_FUNCTIONS pkg_setup pkg_preinst ;;
*) EXPORT_FUNCTIONS pkg_setup src_prepare pkg_preinst ;;
esac
# @FUNCTION: java-pkg-opt-2_pkg_setup
# @DESCRIPTION:
# default pkg_setup, wrapper for java-pkg_init
java-pkg-opt-2_pkg_setup() {
use ${JAVA_PKG_OPT_USE} && java-pkg_init
}
# @FUNCTION: java-pkg-opt-2_src_prepare
# @DESCRIPTION:
# default src_prepare, wrapper for java-utils-2_src_prepare
java-pkg-opt-2_src_prepare() {
use ${JAVA_PKG_OPT_USE} && java-utils-2_src_prepare
case "${EAPI:-0}" in
[0-5]) ;;
*) use ${JAVA_PKG_OPT_USE} || eapply_user ;;
esac
}
# @FUNCTION: java-pkg-opt-2_pkg_preinst
# @DESCRIPTION:
# default pkg_preinst, wrapper for java-utils-2_pkg_preinst
java-pkg-opt-2_pkg_preinst() {
use ${JAVA_PKG_OPT_USE} && java-utils-2_pkg_preinst
}

481
eclass/java-pkg-simple.eclass Normal file
View File

@ -0,0 +1,481 @@
# Copyright 2004-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-pkg-simple.eclass
# @MAINTAINER:
# java@gentoo.org
# @AUTHOR:
# Java maintainers (java@gentoo.org)
# @BLURB: Eclass for packaging Java software with ease.
# @DESCRIPTION:
# This class is intended to build pure Java packages from Java sources
# without the use of any build instructions shipped with the sources.
# There is no support for generating source files, or for controlling
# the META-INF of the resulting jar, although these issues may be
# addressed by an ebuild by putting corresponding files into the target
# directory before calling the src_compile function of this eclass.
inherit java-utils-2
if ! has java-pkg-2 ${INHERITED}; then
eerror "java-pkg-simple eclass can only be inherited AFTER java-pkg-2"
fi
EXPORT_FUNCTIONS src_compile src_install src_test
# We are only interested in finding all java source files, wherever they may be.
S="${WORKDIR}"
# handle dependencies for testing frameworks
if has test ${JAVA_PKG_IUSE}; then
local test_deps
for framework in ${JAVA_TESTING_FRAMEWORKS}; do
case ${framework} in
junit)
test_deps+=" dev-java/junit:0";;
junit-4)
test_deps+=" dev-java/junit:4";;
pkgdiff)
test_deps+=" amd64? ( dev-util/pkgdiff
dev-util/japi-compliance-checker )";;
testng)
test_deps+=" dev-java/testng:0";;
esac
done
[[ ${test_deps} ]] && DEPEND="test? ( ${test_deps} )"
fi
# @ECLASS-VARIABLE: JAVA_GENTOO_CLASSPATH
# @DEFAULT_UNSET
# @DESCRIPTION:
# Comma or space separated list of java packages to include in the
# class path. The packages will also be registered as runtime
# dependencies of this new package. Dependencies will be calculated
# transitively. See "java-config -l" for appropriate package names.
#
# @CODE
# JAVA_GENTOO_CLASSPATH="foo,bar-2"
# @CODE
# @ECLASS-VARIABLE: JAVA_GENTOO_CLASSPATH_EXTRA
# @DEFAULT_UNSET
# @DESCRIPTION:
# Extra list of colon separated path elements to be put on the
# classpath when compiling sources.
# @ECLASS-VARIABLE: JAVA_CLASSPATH_EXTRA
# @DEFAULT_UNSET
# @DESCRIPTION:
# An extra comma or space separated list of java packages
# that are needed only during compiling sources.
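#
# Illustrative value only (package names are hypothetical, mirroring the
# JAVA_GENTOO_CLASSPATH example above):
#
# @CODE
# JAVA_CLASSPATH_EXTRA="foo,bar-2"
# @CODE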
# @ECLASS-VARIABLE: JAVA_NEEDS_TOOLS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Add tools.jar to the gentoo.classpath. Should only be used
# for build-time purposes, the dependency is not recorded to
# package.env.
# @ECLASS-VARIABLE: JAVA_SRC_DIR
# @DEFAULT_UNSET
# @DESCRIPTION:
# An array of directories relative to ${S} which contain the sources
# of the application. If you set ${JAVA_SRC_DIR} to a string it works
# as well. The default value "" means it will get all source files
# inside ${S}.
# For the generated source package (if source is listed in
# ${JAVA_PKG_IUSE}), it is important that these directories are
# actually the roots of the corresponding source trees.
#
# @CODE
# JAVA_SRC_DIR=( "impl/src/main/java/"
# "arquillian/weld-ee-container/src/main/java/"
# )
# @CODE
# @ECLASS-VARIABLE: JAVA_RESOURCE_DIRS
# @DEFAULT_UNSET
# @DESCRIPTION:
# An array of directories relative to ${S} which contain the
# resources of the application. If you do not set the variable,
# there will be no resources added to the compiled jar file.
#
# @CODE
# JAVA_RESOURCE_DIRS=("src/java/resources/")
# @CODE
# @ECLASS-VARIABLE: JAVA_ENCODING
# @DESCRIPTION:
# The character encoding used in the source files.
: ${JAVA_ENCODING:=UTF-8}
# @ECLASS-VARIABLE: JAVAC_ARGS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Additional arguments to be passed to javac.
# @ECLASS-VARIABLE: JAVA_MAIN_CLASS
# @DEFAULT_UNSET
# @DESCRIPTION:
# If the Java program has a main class, set this variable so that
# we can generate a proper MANIFEST.MF and create a launcher.
#
# @CODE
# JAVA_MAIN_CLASS="org.gentoo.java.ebuilder.Main"
# @CODE
# @ECLASS-VARIABLE: JAVADOC_ARGS
# @DEFAULT_UNSET
# @DESCRIPTION:
# Additional arguments to be passed to javadoc.
# @ECLASS-VARIABLE: JAVA_JAR_FILENAME
# @DESCRIPTION:
# The name of the jar file to create and install.
: ${JAVA_JAR_FILENAME:=${PN}.jar}
# @ECLASS-VARIABLE: JAVA_BINJAR_FILENAME
# @DEFAULT_UNSET
# @DESCRIPTION:
# The name of the binary jar file to be installed if
# USE FLAG 'binary' is set.
# @ECLASS-VARIABLE: JAVA_LAUNCHER_FILENAME
# @DESCRIPTION:
# If ${JAVA_MAIN_CLASS} is set, we will create a launcher to
# execute the jar, and ${JAVA_LAUNCHER_FILENAME} will be the
# name of the script.
: ${JAVA_LAUNCHER_FILENAME:=${PN}-${SLOT}}
# @ECLASS-VARIABLE: JAVA_TESTING_FRAMEWORKS
# @DEFAULT_UNSET
# @DESCRIPTION:
# A space separated list that defines which testing frameworks should be
# launched during src_test.
#
# @CODE
# JAVA_TESTING_FRAMEWORKS="junit pkgdiff"
# @CODE
# @ECLASS-VARIABLE: JAVA_TEST_EXCLUDES
# @DEFAULT_UNSET
# @DESCRIPTION:
# An array of classes that should not be executed during src_test().
#
# @CODE
# JAVA_TEST_EXCLUDES=( "net.sf.cglib.CodeGenTestCase" "net.sf.cglib.TestAll" )
# @CODE
# @ECLASS-VARIABLE: JAVA_TEST_GENTOO_CLASSPATH
# @DEFAULT_UNSET
# @DESCRIPTION:
# The extra classpath we need while compiling and running the
# source code for testing.
# @ECLASS-VARIABLE: JAVA_TEST_SRC_DIR
# @DEFAULT_UNSET
# @DESCRIPTION:
# An array of directories relative to ${S} which contain the
# sources for testing. It is almost equivalent to
# ${JAVA_SRC_DIR} in src_test.
# @ECLASS-VARIABLE: JAVA_TEST_RESOURCE_DIRS
# @DEFAULT_UNSET
# @DESCRIPTION:
# It is almost equivalent to ${JAVA_RESOURCE_DIRS} in src_test.
# @FUNCTION: java-pkg-simple_getclasspath
# @USAGE: java-pkg-simple_getclasspath
# @INTERNAL
# @DESCRIPTION:
# Get proper ${classpath} from ${JAVA_GENTOO_CLASSPATH_EXTRA},
# ${JAVA_NEEDS_TOOLS}, ${JAVA_CLASSPATH_EXTRA} and
# ${JAVA_GENTOO_CLASSPATH}. We use it inside
# java-pkg-simple_src_compile and java-pkg-simple_src_test.
#
# Note that the variable "classpath" needs to be defined before
# calling this function.
java-pkg-simple_getclasspath() {
debug-print-function ${FUNCNAME} $*
local dependency
local deep_jars="--with-dependencies"
local buildonly_jars="--build-only"
# the extra classes that are not installed by portage
classpath+=":${JAVA_GENTOO_CLASSPATH_EXTRA}"
# whether we need tools.jar
[[ ${JAVA_NEEDS_TOOLS} ]] && classpath+=":$(java-config --tools)"
# the extra classes that are installed by portage
for dependency in ${JAVA_CLASSPATH_EXTRA}; do
classpath="${classpath}:$(java-pkg_getjars ${buildonly_jars}\
${deep_jars} ${dependency})"
done
# add test dependencies if USE FLAG 'test' is set
if has test ${JAVA_PKG_IUSE} && use test; then
for dependency in ${JAVA_TEST_GENTOO_CLASSPATH}; do
classpath="${classpath}:$(java-pkg_getjars ${buildonly_jars}\
${deep_jars} ${dependency})"
done
fi
# add the RUNTIME dependencies
for dependency in ${JAVA_GENTOO_CLASSPATH}; do
classpath="${classpath}:$(java-pkg_getjars ${deep_jars} ${dependency})"
done
# purify classpath
while [[ $classpath = *::* ]]; do classpath="${classpath//::/:}"; done
classpath=${classpath%:}
classpath=${classpath#:}
debug-print "CLASSPATH=${classpath}"
}
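# Calling sketch, shown only for illustration; it mirrors what
# java-pkg-simple_src_compile below already does:
#
# @CODE
# local classpath=""
# java-pkg-simple_getclasspath
# ejavac -d ${classes} ${classpath:+-classpath ${classpath}} @${sources}
# @CODE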
# @FUNCTION: java-pkg-simple_test_with_pkgdiff_
# @INTERNAL
# @DESCRIPTION:
# Use japi-compliance-checker to ensure the compatibility of *.class files,
# and pkgdiff to ensure the compatibility of resources.
java-pkg-simple_test_with_pkgdiff_() {
debug-print-function ${FUNCNAME} $*
if [[ ! ${ARCH} == "amd64" ]]; then
elog "For architectures other than amd64, "\
"the pkgdiff test is currently unavailable "\
"because 'dev-util/japi-compliance-checker "\
"and 'dev-util/pkgdiff' do not support those architectures."
return
fi
local report1=${PN}-japi-compliance-checker.html
local report2=${PN}-pkgdiff.html
# pkgdiff test
if [[ -f "${DISTDIR}/${JAVA_BINJAR_FILENAME}" ]]; then
# pkgdiff cannot deal with symlinks, so this is a workaround
cp "${DISTDIR}/${JAVA_BINJAR_FILENAME}" ./ \
|| die "Cannot copy binjar file to ${S}."
# japi-compliance-checker
japi-compliance-checker ${JAVA_BINJAR_FILENAME} ${JAVA_JAR_FILENAME}\
--lib=${PN} -v1 ${PV}-bin -v2 ${PV} -report-path ${report1}\
--binary\
|| die "japi-compliance-checker returns $?,"\
"check the report in ${S}/${report1}"
# ignore META-INF since it does not matter
# ignore classes because japi-compilance checker will take care of it
pkgdiff ${JAVA_BINJAR_FILENAME} ${JAVA_JAR_FILENAME}\
-vnum1 ${PV}-bin -vnum2 ${PV}\
-skip-pattern "META-INF|.class$"\
-name ${PN} -report-path ${report2}\
|| die "pkgdiff returns $?, check the report in ${S}/${report2}"
fi
}
# @FUNCTION: java-pkg-simple_prepend_resources
# @USAGE: java-pkg-simple_prepend_resources <${classes}> <"${RESOURCE_DIRS[@]}">
# @INTERNAL
# @DESCRIPTION:
# Copy things under "${JAVA_RESOURCE_DIRS[@]}" or "${JAVA_TEST_RESOURCE_DIRS[@]}"
# to ${classes}, so that `jar` will package resources together with classes.
#
# Note that you need to define a "classes" variable before calling
# this function.
java-pkg-simple_prepend_resources() {
debug-print-function ${FUNCNAME} $*
local destination="${1}"
shift 1
# return if there is no resource dirs defined
[[ "$@" ]] || return
local resources=("${@}")
# add resources directory to classpath
for resource in "${resources[@]}"; do
cp -rT "${resource:-.}" "${destination}"\
|| die "Could not copy resources from ${resource:-.} to ${destination}"
done
}
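# Calling sketch, shown only for illustration; it mirrors the use in
# java-pkg-simple_src_compile and java-pkg-simple_src_test below:
#
# @CODE
# local classes=target/classes
# mkdir -p ${classes} || die
# java-pkg-simple_prepend_resources ${classes} "${JAVA_RESOURCE_DIRS[@]}"
# @CODE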
# @FUNCTION: java-pkg-simple_src_compile
# @DESCRIPTION:
# src_compile for simple bare source java packages. Finds all *.java
# sources in ${JAVA_SRC_DIR}, compiles them with the classpath
# calculated from ${JAVA_GENTOO_CLASSPATH}, and packages the resulting
# classes to a single ${JAVA_JAR_FILENAME}. If the file
# target/META-INF/MANIFEST.MF exists, it is used as the manifest of the
# created jar.
#
# If USE FLAG 'binary' exists and is set, it will just copy
# ${JAVA_BINJAR_FILENAME} to ${S} and skip the rest of src_compile.
java-pkg-simple_src_compile() {
local sources=sources.lst classes=target/classes apidoc=target/api
# auto generate classpath
java-pkg_gen-cp JAVA_GENTOO_CLASSPATH
# do not compile if we decide to install binary jar
if has binary ${JAVA_PKG_IUSE} && use binary; then
# register the runtime dependencies
for dependency in ${JAVA_GENTOO_CLASSPATH//,/ }; do
java-pkg_record-jar_ ${dependency}
done
cp "${DISTDIR}"/${JAVA_BINJAR_FILENAME} ${JAVA_JAR_FILENAME}\
|| die "Could not copy the binary jar file to ${S}"
return 0
fi
# gather sources
find "${JAVA_SRC_DIR[@]}" -name \*.java > ${sources}
# create the target directory
mkdir -p ${classes} || die "Could not create target directory"
# compile
local classpath=""
java-pkg-simple_getclasspath
java-pkg-simple_prepend_resources ${classes} "${JAVA_RESOURCE_DIRS[@]}"
ejavac -d ${classes} -encoding ${JAVA_ENCODING}\
${classpath:+-classpath ${classpath}} ${JAVAC_ARGS}\
@${sources}
# javadoc
if has doc ${JAVA_PKG_IUSE} && use doc; then
mkdir -p ${apidoc}
ejavadoc -d ${apidoc} \
-encoding ${JAVA_ENCODING} -docencoding UTF-8 -charset UTF-8 \
${classpath:+-classpath ${classpath}} ${JAVADOC_ARGS:- -quiet} \
@${sources} || die "javadoc failed"
fi
# package
local jar_args
if [[ -e ${classes}/META-INF/MANIFEST.MF ]]; then
jar_args="cfm ${JAVA_JAR_FILENAME} ${classes}/META-INF/MANIFEST.MF"
elif [[ ${JAVA_MAIN_CLASS} ]]; then
jar_args="cfe ${JAVA_JAR_FILENAME} ${JAVA_MAIN_CLASS}"
else
jar_args="cf ${JAVA_JAR_FILENAME}"
fi
jar ${jar_args} -C ${classes} . || die "jar failed"
}
# @FUNCTION: java-pkg-simple_src_install
# @DESCRIPTION:
# src_install for simple single jar java packages. Simply installs
# ${JAVA_JAR_FILENAME}. It will also install a launcher if
# ${JAVA_MAIN_CLASS} is set.
java-pkg-simple_src_install() {
local sources=sources.lst classes=target/classes apidoc=target/api
# install the jar file that we need
java-pkg_dojar ${JAVA_JAR_FILENAME}
# install a wrapper if ${JAVA_MAIN_CLASS} is defined
if [[ ${JAVA_MAIN_CLASS} ]]; then
java-pkg_dolauncher "${JAVA_LAUNCHER_FILENAME}" --main ${JAVA_MAIN_CLASS}
fi
# javadoc
if has doc ${JAVA_PKG_IUSE} && use doc; then
java-pkg_dojavadoc ${apidoc}
fi
# dosrc
if has source ${JAVA_PKG_IUSE} && use source; then
local srcdirs=""
if [[ "${JAVA_SRC_DIR[@]}" ]]; then
local parent child
for parent in "${JAVA_SRC_DIR[@]}"; do
srcdirs="${srcdirs} ${parent}"
done
else
# take all directories actually containing any sources
srcdirs="$(cut -d/ -f1 ${sources} | sort -u)"
fi
java-pkg_dosrc ${srcdirs}
fi
}
# @FUNCTION: java-pkg-simple_src_test
# @DESCRIPTION:
# src_test for simple single java jar file.
# It will perform test with frameworks that are defined in
# ${JAVA_TESTING_FRAMEWORKS}.
java-pkg-simple_src_test() {
local test_sources=test_sources.lst classes=target/test-classes
local tests_to_run classpath
# do not continue if the USE FLAG 'test' is explicitly unset
# or no ${JAVA_TESTING_FRAMEWORKS} is specified
if ! has test ${JAVA_PKG_IUSE}; then
return
elif ! use test; then
return
elif [[ ! "${JAVA_TESTING_FRAMEWORKS}" ]]; then
return
fi
# create the target directory
mkdir -p ${classes} || die "Could not create target directory for testing"
# get classpath
classpath="${classes}:${JAVA_JAR_FILENAME}"
java-pkg-simple_getclasspath
java-pkg-simple_prepend_resources ${classes} "${JAVA_TEST_RESOURCE_DIRS[@]}"
# gathering sources for testing
find "${JAVA_TEST_SRC_DIR[@]}" -name \*.java > ${test_sources}
# compile
[[ -s ${test_sources} ]] && ejavac -d ${classes} ${JAVAC_ARGS} \
-encoding ${JAVA_ENCODING} ${classpath:+-classpath ${classpath}} \
@${test_sources}
# grab a set of tests that testing framework will run
tests_to_run=$(find "${classes}" -type f\
\( -name "*Test.class"\
-o -name "Test*.class"\
-o -name "*Tests.class"\
-o -name "*TestCase.class" \)\
! -name "*Abstract*"\
! -name "*BaseTest*"\
! -name "*TestTypes*"\
! -name "*TestUtils*"\
! -name "*\$*")
tests_to_run=${tests_to_run//"${classes}"\/}
tests_to_run=${tests_to_run//.class}
tests_to_run=${tests_to_run//\//.}
# exclude extra test classes, usually corner cases
# that the code above cannot handle
for class in "${JAVA_TEST_EXCLUDES[@]}"; do
tests_to_run=${tests_to_run//${class}}
done
# launch test
for framework in ${JAVA_TESTING_FRAMEWORKS}; do
case ${framework} in
junit)
ejunit -classpath "${classpath}" ${tests_to_run};;
junit-4)
ejunit4 -classpath "${classpath}" ${tests_to_run};;
pkgdiff)
java-pkg-simple_test_with_pkgdiff_;;
testng)
etestng -classpath "${classpath}" ${tests_to_run};;
*)
elog "No suitable function found for framework ${framework}"
esac
done
}

2959
eclass/java-utils-2.eclass Normal file

File diff suppressed because it is too large

54
eclass/java-virtuals-2.eclass Normal file
View File

@ -0,0 +1,54 @@
# Copyright 1999-2015 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-virtuals-2.eclass
# @MAINTAINER:
# java@gentoo.org
# @AUTHOR:
# Original Author: Alistair John Bush <ali_bush@gentoo.org>
# @BLURB: Java virtuals eclass
# @DESCRIPTION:
# To provide a default (and only) src_install function for ebuilds in the
# java-virtuals category.
inherit java-utils-2
DEPEND=">=dev-java/java-config-2.2.0-r3"
RDEPEND="${DEPEND}"
S="${WORKDIR}"
EXPORT_FUNCTIONS src_install
# @FUNCTION: java-virtuals-2_src_install
# @DESCRIPTION:
# default src_install
java-virtuals-2_src_install() {
java-virtuals-2_do_write
}
# @FUNCTION: java-virtuals-2_do_write
# @INTERNAL
# @DESCRIPTION:
# Writes the virtual env file out to disk.
java-virtuals-2_do_write() {
java-pkg_init_paths_
dodir "${JAVA_PKG_VIRTUALS_PATH}"
{
if [[ -n "${JAVA_VIRTUAL_PROVIDES}" ]]; then
echo "PROVIDERS=\"${JAVA_VIRTUAL_PROVIDES}\""
fi
if [[ -n "${JAVA_VIRTUAL_VM}" ]]; then
echo "VM=\"${JAVA_VIRTUAL_VM}\""
fi
if [[ -n "${JAVA_VIRTUAL_VM_CLASSPATH}" ]]; then
echo "VM_CLASSPATH=\"${JAVA_VIRTUAL_VM_CLASSPATH}\""
fi
echo "MULTI_PROVIDER=\"${JAVA_VIRTUAL_MULTI=FALSE}\""
} > "${JAVA_PKG_VIRTUAL_PROVIDER}"
}
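# A hedged sketch of a consuming ebuild (the package names are hypothetical
# and only illustrate the variables written out above):
#
# @CODE
# JAVA_VIRTUAL_PROVIDES="dev-java/foo dev-java/bar"
# inherit java-virtuals-2
#
# DESCRIPTION="Virtual for the foo API"
# @CODE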

320
eclass/java-vm-2.eclass Normal file
View File

@ -0,0 +1,320 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: java-vm-2.eclass
# @MAINTAINER:
# java@gentoo.org
# @SUPPORTED_EAPIS: 5 6
# @BLURB: Java Virtual Machine eclass
# @DESCRIPTION:
# This eclass provides functionality which assists with installing
# virtual machines, and ensures that they are recognized by java-config.
case ${EAPI:-0} in
5|6) ;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
inherit multilib pax-utils prefix xdg-utils
EXPORT_FUNCTIONS pkg_setup pkg_postinst pkg_prerm pkg_postrm
RDEPEND="
>=dev-java/java-config-2.2.0-r3
>=app-eselect/eselect-java-0.4.0"
DEPEND="${RDEPEND}"
export WANT_JAVA_CONFIG=2
# @ECLASS-VARIABLE: JAVA_VM_CONFIG_DIR
# @INTERNAL
# @DESCRIPTION:
# Where to place the vm env file.
JAVA_VM_CONFIG_DIR="/usr/share/java-config-2/vm"
# @ECLASS-VARIABLE: JAVA_VM_DIR
# @INTERNAL
# @DESCRIPTION:
# Base directory for vm links.
JAVA_VM_DIR="/usr/lib/jvm"
# @ECLASS-VARIABLE: JAVA_VM_SYSTEM
# @INTERNAL
# @DESCRIPTION:
# Link for system-vm
JAVA_VM_SYSTEM="/etc/java-config-2/current-system-vm"
# @ECLASS-VARIABLE: JAVA_VM_BUILD_ONLY
# @DESCRIPTION:
# Set to YES to mark a vm as build-only.
JAVA_VM_BUILD_ONLY="${JAVA_VM_BUILD_ONLY:-FALSE}"
# @FUNCTION: java-vm-2_pkg_setup
# @DESCRIPTION:
# default pkg_setup
#
# Initialize vm handle.
java-vm-2_pkg_setup() {
if [[ "${SLOT}" != "0" ]]; then
VMHANDLE=${PN}-${SLOT}
else
VMHANDLE=${PN}
fi
}
# @FUNCTION: java-vm-2_pkg_postinst
# @DESCRIPTION:
# default pkg_postinst
#
# Set the generation-2 system VM, if it isn't set or the setting is
# invalid. Also update the desktop file database.
java-vm-2_pkg_postinst() {
if [[ ! -d ${EROOT}${JAVA_VM_SYSTEM} ]]; then
eselect java-vm set system "${VMHANDLE}"
einfo "${P} set as the default system-vm."
fi
xdg_desktop_database_update
}
# @FUNCTION: java-vm-2_pkg_prerm
# @DESCRIPTION:
# default pkg_prerm
#
# Warn user if removing system-vm.
java-vm-2_pkg_prerm() {
if [[ $(GENTOO_VM= java-config -f 2>/dev/null) == ${VMHANDLE} && -z ${REPLACED_BY_VERSION} ]]; then
ewarn "It appears you are removing your system-vm! Please run"
ewarn "\"eselect java-vm list\" to list available VMs, then use"
ewarn "\"eselect java-vm set system\" to set a new system-vm!"
fi
}
# @FUNCTION: java-vm-2_pkg_postrm
# @DESCRIPTION:
# default pkg_postrm
#
# Update the desktop file database.
java-vm-2_pkg_postrm() {
xdg_desktop_database_update
}
# @FUNCTION: get_system_arch
# @DESCRIPTION:
# Get Java specific arch name.
#
# NOTE the mips and sparc values are best guesses. Oracle uses sparcv9
# but does OpenJDK use sparc64? We don't support OpenJDK on sparc or any
# JVM on mips though so it doesn't matter much.
get_system_arch() {
local abi=${1-${ABI}}
case $(get_abi_CHOST ${abi}) in
mips*l*) echo mipsel ;;
mips*) echo mips ;;
powerpc64le*) echo ppc64le ;;
*)
case ${abi} in
*_fbsd) get_system_arch ${abi%_fbsd} ;;
arm64) echo aarch64 ;;
hppa) echo parisc ;;
sparc32) echo sparc ;;
sparc64) echo sparcv9 ;;
x86*) echo i386 ;;
*) echo ${abi} ;;
esac ;;
esac
}
# @FUNCTION: set_java_env
# @DESCRIPTION:
# Installs a vm env file.
# DEPRECATED, use java-vm_install-env instead.
set_java_env() {
debug-print-function ${FUNCNAME} $*
local platform="$(get_system_arch)"
local env_file="${ED}${JAVA_VM_CONFIG_DIR}/${VMHANDLE}"
if [[ ${1} ]]; then
local source_env_file="${1}"
else
local source_env_file="${FILESDIR}/${VMHANDLE}.env"
fi
if [[ ! -f ${source_env_file} ]]; then
die "Unable to find the env file: ${source_env_file}"
fi
dodir ${JAVA_VM_CONFIG_DIR}
sed \
-e "s/@P@/${P}/g" \
-e "s/@PN@/${PN}/g" \
-e "s/@PV@/${PV}/g" \
-e "s/@PF@/${PF}/g" \
-e "s/@SLOT@/${SLOT}/g" \
-e "s/@PLATFORM@/${platform}/g" \
-e "s/@LIBDIR@/$(get_libdir)/g" \
-e "/^LDPATH=.*lib\\/\\\"/s|\"\\(.*\\)\"|\"\\1${platform}/:\\1${platform}/server/\"|" \
< "${source_env_file}" \
> "${env_file}" || die "sed failed"
(
echo "VMHANDLE=\"${VMHANDLE}\""
echo "BUILD_ONLY=\"${JAVA_VM_BUILD_ONLY}\""
) >> "${env_file}"
eprefixify ${env_file}
[[ -n ${JAVA_PROVIDE} ]] && echo "PROVIDES=\"${JAVA_PROVIDE}\"" >> ${env_file}
local java_home=$(source "${env_file}"; echo ${JAVA_HOME})
[[ -z ${java_home} ]] && die "No JAVA_HOME defined in ${env_file}"
# Make the symlink
dodir "${JAVA_VM_DIR}"
dosym ${java_home#${EPREFIX}} ${JAVA_VM_DIR}/${VMHANDLE}
}
# @FUNCTION: java-vm_install-env
# @DESCRIPTION:
#
# Installs a Java VM environment file. The source can be specified but
# defaults to ${FILESDIR}/${VMHANDLE}.env.sh.
#
# Environment variables within this file will be resolved. You should
# escape the $ when referring to variables that should be resolved later
# such as ${JAVA_HOME}. Subshells may be used but avoid using double
# quotes. See icedtea-bin.env.sh for a good example.
java-vm_install-env() {
debug-print-function ${FUNCNAME} "$*"
local env_file="${ED}${JAVA_VM_CONFIG_DIR}/${VMHANDLE}"
local source_env_file="${1-${FILESDIR}/${VMHANDLE}.env.sh}"
if [[ ! -f "${source_env_file}" ]]; then
die "Unable to find the env file: ${source_env_file}"
fi
dodir "${JAVA_VM_CONFIG_DIR}"
# Here be dragons! ;) -- Chewi
eval echo "\"$(cat <<< "$(sed 's:":\\":g' "${source_env_file}")")\"" > "${env_file}" ||
die "failed to create Java env file"
(
echo "VMHANDLE=\"${VMHANDLE}\""
echo "BUILD_ONLY=\"${JAVA_VM_BUILD_ONLY}\""
[[ ${JAVA_PROVIDE} ]] && echo "PROVIDES=\"${JAVA_PROVIDE}\"" || true
) >> "${env_file}" || die "failed to append to Java env file"
local java_home=$(unset JAVA_HOME; source "${env_file}"; echo ${JAVA_HOME})
[[ -z ${java_home} ]] && die "No JAVA_HOME defined in ${env_file}"
# Make the symlink
dodir "${JAVA_VM_DIR}"
dosym "${java_home#${EPREFIX}}" "${JAVA_VM_DIR}/${VMHANDLE}"
}
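# A hedged sketch of such an env.sh file (all values are illustrative
# assumptions; see icedtea-bin.env.sh for a real example). JAVA_HOME is left
# unescaped so it is resolved at install time, while the escaped
# \${JAVA_HOME} references are resolved later when the env file is sourced:
#
# @CODE
# JAVA_HOME="${EPREFIX}/opt/${P}"
# JAVAC="\${JAVA_HOME}/bin/javac"
# PATH="\${JAVA_HOME}/bin"
# ROOTPATH="\${JAVA_HOME}/bin"
# LDPATH="\${JAVA_HOME}/lib/:\${JAVA_HOME}/lib/server/"
# @CODE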
# @FUNCTION: java-vm_set-pax-markings
# @DESCRIPTION:
# Set PaX markings on all JDK/JRE executables to allow code-generation on
# the heap by the JIT compiler.
#
# The markings need to be set prior to the first invocation of the freshly
# built / installed VM, be it before creating the Class Data Sharing archive
# or generating cacerts. Otherwise a PaX enabled kernel will kill the VM.
# Bug #215225 #389751
#
# @CODE
# Parameters:
# $1 - JDK/JRE base directory.
#
# Examples:
# java-vm_set-pax-markings "${S}"
# java-vm_set-pax-markings "${ED}"/opt/${P}
# @CODE
java-vm_set-pax-markings() {
debug-print-function ${FUNCNAME} "$*"
[[ $# -ne 1 ]] && die "${FUNCNAME}: takes exactly one argument"
[[ ! -f "${1}"/bin/java ]] \
&& die "${FUNCNAME}: argument needs to be JDK/JRE base directory"
local executables=( "${1}"/bin/* )
[[ -d "${1}"/jre ]] && executables+=( "${1}"/jre/bin/* )
# Usually disabling MPROTECT is sufficient.
local pax_markings="m"
# On x86 for heap sizes over 700MB disable SEGMEXEC and PAGEEXEC as well.
use x86 && pax_markings+="sp"
pax-mark ${pax_markings} $(list-paxables "${executables[@]}")
}
# @FUNCTION: java-vm_revdep-mask
# @DESCRIPTION:
# Installs a revdep-rebuild control file with SEARCH_DIRS_MASK set to the path
# where the VM is installed. Prevents pointless rebuilds - see bug #177925.
# Also gives a notice to the user.
#
# @CODE
# Parameters:
# $1 - Path of the VM (defaults to /opt/${P} if not set)
#
# Examples:
# java-vm_revdep-mask
# java-vm_revdep-mask /path/to/jdk/
#
# @CODE
java-vm_revdep-mask() {
debug-print-function ${FUNCNAME} "$*"
local VMROOT="${1-"${EPREFIX}"/opt/${P}}"
dodir /etc/revdep-rebuild
echo "SEARCH_DIRS_MASK=\"${VMROOT}\"" >> "${ED}/etc/revdep-rebuild/61-${VMHANDLE}" \
|| die "Failed to write revdep-rebuild mask file"
}
# @FUNCTION: java-vm_sandbox-predict
# @DESCRIPTION:
# Install a sandbox control file. Specified paths won't cause a sandbox
# violation if opened read-write, as long as no write actually takes place.
# See bug 388937#c1
#
# @CODE
# Examples:
# java-vm_sandbox-predict /dev/random /proc/self/coredump_filter
# @CODE
java-vm_sandbox-predict() {
debug-print-function ${FUNCNAME} "$*"
[[ -z "${1}" ]] && die "${FUNCNAME} takes at least one argument"
local path path_arr=("$@")
# subshell this to prevent IFS bleeding out dependent on the bash version.
# could use local, which *should* work, but that requires a lot of testing.
path=$(IFS=":"; echo "${path_arr[*]}")
dodir /etc/sandbox.d
echo "SANDBOX_PREDICT=\"${path}\"" > "${ED}/etc/sandbox.d/20${VMHANDLE}" \
|| die "Failed to write sandbox control file"
}

263
eclass/kde.org.eclass Normal file
View File

@ -0,0 +1,263 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kde.org.eclass
# @MAINTAINER:
# kde@gentoo.org
# @SUPPORTED_EAPIS: 7
# @BLURB: Support eclass for packages that are hosted on kde.org infrastructure.
# @DESCRIPTION:
# This eclass mainly provides facilities for the upstream release groups
# Frameworks, Plasma and Release Service: it assembles a default SRC_URI for
# tarballs, sets up git-r3.eclass for stable/master branch versions, and
# restricts access to unreleased (packager access only) tarballs in the Gentoo
# KDE overlay. It may also be used by any other package hosted on kde.org.
# It also contains default meta variables for settings not specific to any
# particular build system.
if [[ -z ${_KDE_ORG_ECLASS} ]]; then
_KDE_ORG_ECLASS=1
# @ECLASS-VARIABLE: KDE_BUILD_TYPE
# @DESCRIPTION:
# If PV matches "*9999*", this is automatically set to "live".
# Otherwise, this is automatically set to "release".
KDE_BUILD_TYPE="release"
if [[ ${PV} = *9999* ]]; then
KDE_BUILD_TYPE="live"
fi
export KDE_BUILD_TYPE
if [[ ${KDE_BUILD_TYPE} = live ]]; then
inherit git-r3
fi
EXPORT_FUNCTIONS pkg_nofetch src_unpack
# @ECLASS-VARIABLE: KDE_ORG_NAME
# @DESCRIPTION:
# Name of the package as hosted on kde.org mirrors.
# If unset, the default value is ${PN}.
: ${KDE_ORG_NAME:=$PN}
# @ECLASS-VARIABLE: KDE_RELEASE_SERVICE
# @DESCRIPTION:
# If set to "false", do nothing.
# If set to "true", set SRC_URI accordingly and apply KDE_UNRELEASED.
: ${KDE_RELEASE_SERVICE:=false}
# @ECLASS-VARIABLE: KDE_SELINUX_MODULE
# @DESCRIPTION:
# If set to "none", do nothing.
# For any other value, add selinux to IUSE, and depending on that useflag
# add a dependency on sec-policy/selinux-${KDE_SELINUX_MODULE} to (R)DEPEND.
: ${KDE_SELINUX_MODULE:=none}
case ${KDE_SELINUX_MODULE} in
none) ;;
*)
IUSE+=" selinux"
RDEPEND+=" selinux? ( sec-policy/selinux-${KDE_SELINUX_MODULE} )"
;;
esac
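# For illustration (hypothetical module name): an ebuild setting
# KDE_SELINUX_MODULE="bar" before the inherit ends up with
#
# @CODE
# IUSE+=" selinux"
# RDEPEND+=" selinux? ( sec-policy/selinux-bar )"
# @CODE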
# @ECLASS-VARIABLE: KDE_UNRELEASED
# @INTERNAL
# @DESCRIPTION:
# An array of $CATEGORY-$PV pairs of packages that are unreleased upstream.
# Any package matching this will have fetch restriction enabled, and receive
# a proper error message via pkg_nofetch.
KDE_UNRELEASED=( )
HOMEPAGE="https://kde.org/"
case ${CATEGORY} in
kde-apps)
KDE_RELEASE_SERVICE=true
;;
kde-plasma)
HOMEPAGE="https://kde.org/plasma-desktop"
;;
kde-frameworks)
HOMEPAGE="https://kde.org/products/frameworks/"
SLOT=5/${PV}
[[ ${KDE_BUILD_TYPE} = release ]] && SLOT=$(ver_cut 1)/$(ver_cut 1-2)
;;
*) ;;
esac
_kde.org_is_unreleased() {
local pair
for pair in "${KDE_UNRELEASED[@]}" ; do
if [[ "${pair}" = "${CATEGORY}-${PV}" ]]; then
return 0
elif [[ ${KDE_RELEASE_SERVICE} = true ]]; then
if [[ "${pair/kde-apps/${CATEGORY}}" = "${CATEGORY}-${PV}" ]]; then
return 0
fi
fi
done
return 1
}
# Determine fetch location for released tarballs
_kde.org_calculate_src_uri() {
debug-print-function ${FUNCNAME} "$@"
local _src_uri="mirror://kde/"
if [[ ${KDE_RELEASE_SERVICE} = true ]]; then
case ${PV} in
??.??.[6-9]? )
_src_uri+="unstable/release-service/${PV}/src/"
RESTRICT+=" mirror"
;;
*) _src_uri+="stable/release-service/${PV}/src/" ;;
esac
fi
case ${CATEGORY} in
kde-frameworks)
_src_uri+="stable/frameworks/$(ver_cut 1-2)/"
case ${PN} in
kdelibs4support | \
kdesignerplugin | \
kdewebkit | \
khtml | \
kjs | \
kjsembed | \
kmediaplayer | \
kross | \
kxmlrpcclient)
_src_uri+="portingAids/"
;;
esac
;;
kde-plasma)
case ${PV} in
5.??.[6-9]?* )
_src_uri+="unstable/plasma/$(ver_cut 1-3)/"
RESTRICT+=" mirror"
;;
*) _src_uri+="stable/plasma/$(ver_cut 1-3)/" ;;
esac
;;
esac
if [[ ${PN} = kdevelop* ]]; then
case ${PV} in
*.*.[6-9]? )
_src_uri+="unstable/kdevelop/${PV}/src/"
RESTRICT+=" mirror"
;;
*) _src_uri+="stable/kdevelop/${PV}/src/" ;;
esac
fi
SRC_URI="${_src_uri}${KDE_ORG_NAME}-${PV}.tar.xz"
if _kde.org_is_unreleased ; then
RESTRICT+=" fetch"
fi
}
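# Worked example (hypothetical package, for illustration only): a kde-plasma
# package "foo" with PV=5.20.90 matches the unstable pattern above, so the
# function would yield
#
# @CODE
# SRC_URI="mirror://kde/unstable/plasma/5.20.90/foo-5.20.90.tar.xz"
# RESTRICT+=" mirror"
# @CODE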
# Determine fetch location for live sources
_kde.org_calculate_live_repo() {
debug-print-function ${FUNCNAME} "$@"
SRC_URI=""
# @ECLASS-VARIABLE: EGIT_MIRROR
# @DESCRIPTION:
# This variable allows easy overriding of default kde mirror service
# (anongit) with anything else you might want to use.
EGIT_MIRROR=${EGIT_MIRROR:=https://invent.kde.org/kde}
if [[ ${PV} == ??.??.49.9999 && ${KDE_RELEASE_SERVICE} = true ]]; then
EGIT_BRANCH="release/$(ver_cut 1-2)"
fi
if [[ ${PV} != 9999 && ${CATEGORY} = kde-plasma ]]; then
EGIT_BRANCH="Plasma/$(ver_cut 1-2)"
fi
if [[ ${PV} != 9999 && ${PN} = kdevelop* ]]; then
EGIT_BRANCH="$(ver_cut 1-2)"
fi
# @ECLASS-VARIABLE: EGIT_REPONAME
# @DESCRIPTION:
# This variable allows overriding of default repository
# name. Specify only if this differs from PN and KDE_ORG_NAME.
EGIT_REPO_URI="${EGIT_MIRROR}/${EGIT_REPONAME:=$KDE_ORG_NAME}.git"
}
case ${KDE_BUILD_TYPE} in
live) _kde.org_calculate_live_repo ;;
*)
_kde.org_calculate_src_uri
debug-print "${LINENO} ${ECLASS} ${FUNCNAME}: SRC_URI is ${SRC_URI}"
;;
esac
if [[ ${KDE_BUILD_TYPE} = release ]]; then
S=${WORKDIR}/${KDE_ORG_NAME}-${PV}
fi
# @FUNCTION: kde.org_pkg_nofetch
# @DESCRIPTION:
# Intended for use in the KDE overlay. If this package matches something in
# KDE_UNRELEASED, display a giant warning that the package has not yet been
# released upstream and should not be used.
kde.org_pkg_nofetch() {
if ! _kde.org_is_unreleased ; then
return
fi
local sched_uri="https://community.kde.org/Schedules"
case ${CATEGORY} in
kde-frameworks) sched_uri+="/Frameworks" ;;
kde-plasma) sched_uri+="/Plasma_5" ;;
*)
[[ ${KDE_RELEASE_SERVICE} = true ]] &&
sched_uri+="/release_service/$(ver_cut 1-2)_Release_Schedule"
;;
esac
eerror " _ _ _ _ ____ _____ _ _____ _ ____ _____ ____ "
eerror "| | | | \ | | _ \| ____| | | ____| / \ / ___|| ____| _ \ "
eerror "| | | | \| | |_) | _| | | | _| / _ \ \___ \| _| | | | |"
eerror "| |_| | |\ | _ <| |___| |___| |___ / ___ \ ___) | |___| |_| |"
eerror " \___/|_| \_|_| \_\_____|_____|_____/_/ \_\____/|_____|____/ "
eerror " "
eerror " ____ _ ____ _ __ _ ____ _____ "
eerror "| _ \ / \ / ___| |/ / / \ / ___| ____|"
eerror "| |_) / _ \| | | ' / / _ \| | _| _| "
eerror "| __/ ___ \ |___| . \ / ___ \ |_| | |___ "
eerror "|_| /_/ \_\____|_|\_\/_/ \_\____|_____|"
eerror
eerror "${CATEGORY}/${P} has not been released to the public yet"
eerror "and is only available to packagers right now."
eerror ""
eerror "This is not a bug. Please do not file bugs or contact upstream about this."
eerror ""
eerror "Please consult the upstream release schedule to see when this "
eerror "package is scheduled to be released:"
eerror "${sched_uri}"
}
# @FUNCTION: kde.org_src_unpack
# @DESCRIPTION:
# Unpack the sources, automatically handling both release and live ebuilds.
kde.org_src_unpack() {
debug-print-function ${FUNCNAME} "$@"
case ${KDE_BUILD_TYPE} in
live) git-r3_src_unpack ;&
*) default ;;
esac
}
fi

1674
eclass/kernel-2.eclass Normal file

File diff suppressed because it is too large

235
eclass/kernel-build.eclass Normal file
View File

@ -0,0 +1,235 @@
# Copyright 2020-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kernel-build.eclass
# @MAINTAINER:
# Distribution Kernel Project <dist-kernel@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 7
# @BLURB: Build mechanics for Distribution Kernels
# @DESCRIPTION:
# This eclass provides the logic to build a Distribution Kernel from
# source and install it. Post-install and test logic is inherited
# from kernel-install.eclass.
#
# The ebuild must take care of unpacking the kernel sources, copying
# an appropriate .config into them (e.g. in src_prepare()) and setting
# correct S. The eclass takes care of respecting savedconfig, building
# the kernel and installing it along with its modules and subset
# of sources needed to build external modules.
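#
# A hedged sketch of a consuming ebuild (URLs and file names below are
# illustrative assumptions, not a real package):
#
# @CODE
# inherit kernel-build
#
# SRC_URI="https://example.org/linux-${PV}.tar.xz
#	https://example.org/kernel-config-${PV}"
# S=${WORKDIR}/linux-${PV}
#
# src_prepare() {
#	default
#	cp "${DISTDIR}/kernel-config-${PV}" .config || die
# }
# @CODE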
if [[ ! ${_KERNEL_BUILD_ECLASS} ]]; then
case "${EAPI:-0}" in
0|1|2|3|4|5|6)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
inherit savedconfig toolchain-funcs kernel-install
BDEPEND="
sys-devel/bc
sys-devel/flex
virtual/libelf
virtual/yacc"
# @FUNCTION: kernel-build_src_configure
# @DESCRIPTION:
# Prepare the toolchain for building the kernel, get the default .config
# or restore savedconfig, and get build tree configured for modprep.
kernel-build_src_configure() {
debug-print-function ${FUNCNAME} "${@}"
# force ld.bfd if we can find it easily
local LD="$(tc-getLD)"
if type -P "${LD}.bfd" &>/dev/null; then
LD+=.bfd
fi
tc-export_build_env
MAKEARGS=(
V=1
HOSTCC="$(tc-getBUILD_CC)"
HOSTCXX="$(tc-getBUILD_CXX)"
HOSTCFLAGS="${BUILD_CFLAGS}"
HOSTLDFLAGS="${BUILD_LDFLAGS}"
CROSS_COMPILE=${CHOST}-
AS="$(tc-getAS)"
CC="$(tc-getCC)"
LD="${LD}"
AR="$(tc-getAR)"
NM="$(tc-getNM)"
STRIP=":"
OBJCOPY="$(tc-getOBJCOPY)"
OBJDUMP="$(tc-getOBJDUMP)"
# we need to pass it to override colliding Gentoo envvar
ARCH=$(tc-arch-kernel)
)
restore_config .config
[[ -f .config ]] || die "Ebuild error: please copy default config into .config"
if [[ -z "${KV_LOCALVERSION}" ]]; then
KV_LOCALVERSION=$(sed -n -e 's#^CONFIG_LOCALVERSION="\(.*\)"$#\1#p' \
.config)
fi
mkdir -p "${WORKDIR}"/modprep || die
mv .config "${WORKDIR}"/modprep/ || die
emake O="${WORKDIR}"/modprep "${MAKEARGS[@]}" olddefconfig
emake O="${WORKDIR}"/modprep "${MAKEARGS[@]}" modules_prepare
cp -pR "${WORKDIR}"/modprep "${WORKDIR}"/build || die
}
# @FUNCTION: kernel-build_src_compile
# @DESCRIPTION:
# Compile the kernel sources.
kernel-build_src_compile() {
debug-print-function ${FUNCNAME} "${@}"
emake O="${WORKDIR}"/build "${MAKEARGS[@]}" all
}
# @FUNCTION: kernel-build_src_test
# @DESCRIPTION:
# Test the built kernel via qemu. This just wraps the logic
# from kernel-install.eclass with the correct paths.
kernel-build_src_test() {
debug-print-function ${FUNCNAME} "${@}"
local targets=( modules_install )
# on arm or arm64 you also need dtb
if use arm || use arm64; then
targets+=( dtbs_install )
fi
emake O="${WORKDIR}"/build "${MAKEARGS[@]}" \
INSTALL_MOD_PATH="${T}" INSTALL_PATH="${ED}/boot" "${targets[@]}"
local ver="${PV}${KV_LOCALVERSION}"
kernel-install_test "${ver}" \
"${WORKDIR}/build/$(dist-kernel_get_image_path)" \
"${T}/lib/modules/${ver}"
}
# @FUNCTION: kernel-build_src_install
# @DESCRIPTION:
# Install the built kernel along with a subset of sources
# into /usr/src/linux-${PV}. Install the modules. Save the config.
kernel-build_src_install() {
debug-print-function ${FUNCNAME} "${@}"
# do not use 'make install' as it behaves differently based
# on what kind of installkernel is installed
local targets=( modules_install )
# on arm or arm64 you also need dtb
if use arm || use arm64; then
targets+=( dtbs_install )
fi
emake O="${WORKDIR}"/build "${MAKEARGS[@]}" \
INSTALL_MOD_PATH="${ED}" INSTALL_PATH="${ED}/boot" "${targets[@]}"
# note: we're using mv rather than doins to save space and time
# install main and arch-specific headers first, and scripts
local kern_arch=$(tc-arch-kernel)
local ver="${PV}${KV_LOCALVERSION}"
dodir "/usr/src/linux-${ver}/arch/${kern_arch}"
mv include scripts "${ED}/usr/src/linux-${ver}/" || die
mv "arch/${kern_arch}/include" \
"${ED}/usr/src/linux-${ver}/arch/${kern_arch}/" || die
# some arches need module.lds linker script to build external modules
if [[ -f arch/${kern_arch}/kernel/module.lds ]]; then
insinto "/usr/src/linux-${ver}/arch/${kern_arch}/kernel"
doins "arch/${kern_arch}/kernel/module.lds"
fi
# remove everything but Makefile* and Kconfig*
find -type f '!' '(' -name 'Makefile*' -o -name 'Kconfig*' ')' \
-delete || die
find -type l -delete || die
cp -p -R * "${ED}/usr/src/linux-${ver}/" || die
cd "${WORKDIR}" || die
# strip out-of-source build files from modprep
# and then copy built files as well
find modprep -type f '(' \
-name Makefile -o \
-name '*.[ao]' -o \
'(' -name '.*' -a -not -name '.config' ')' \
')' -delete || die
rm modprep/source || die
cp -p -R modprep/. "${ED}/usr/src/linux-${ver}"/ || die
# install the kernel and files needed for module builds
insinto "/usr/src/linux-${ver}"
doins build/{System.map,Module.symvers}
local image_path=$(dist-kernel_get_image_path)
cp -p "build/${image_path}" "${ED}/usr/src/linux-${ver}/${image_path}" || die
# building modules fails with 'vmlinux has no symtab?' if stripped
use ppc64 && dostrip -x "/usr/src/linux-${ver}/${image_path}"
# strip empty directories
find "${D}" -type d -empty -exec rmdir {} + || die
# fix source tree and build dir symlinks
dosym ../../../usr/src/linux-${ver} /lib/modules/${ver}/build
dosym ../../../usr/src/linux-${ver} /lib/modules/${ver}/source
save_config build/.config
}
# @FUNCTION: kernel-build_pkg_postinst
# @DESCRIPTION:
# Combine postinst from kernel-install and savedconfig eclasses.
kernel-build_pkg_postinst() {
kernel-install_pkg_postinst
savedconfig_pkg_postinst
}
# @FUNCTION: kernel-build_merge_configs
# @USAGE: [distro.config...]
# @DESCRIPTION:
# Merge the config files specified as arguments (if any) into
# the '.config' file in the current directory, then merge
# any user-supplied configs from ${BROOT}/etc/kernel/config.d/*.config.
# The '.config' file must exist already and contain the base
# configuration.
kernel-build_merge_configs() {
debug-print-function ${FUNCNAME} "${@}"
[[ -f .config ]] || die "${FUNCNAME}: .config does not exist"
has .config "${@}" &&
die "${FUNCNAME}: do not specify .config as parameter"
local shopt_save=$(shopt -p nullglob)
shopt -s nullglob
local user_configs=( "${BROOT}"/etc/kernel/config.d/*.config )
shopt -u nullglob
if [[ ${#user_configs[@]} -gt 0 ]]; then
elog "User config files are being applied:"
local x
for x in "${user_configs[@]}"; do
elog "- ${x}"
done
fi
./scripts/kconfig/merge_config.sh -m -r \
.config "${@}" "${user_configs[@]}" || die
}
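# Calling sketch (the config file name is a hypothetical assumption),
# typically from src_prepare() once the base .config is in place:
#
# @CODE
# src_prepare() {
#	default
#	cp "${DISTDIR}/kernel-config-${PV}" .config || die
#	kernel-build_merge_configs "${FILESDIR}/${PN}-extra.config"
# }
# @CODE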
_KERNEL_BUILD_ECLASS=1
fi
EXPORT_FUNCTIONS src_configure src_compile src_test src_install pkg_postinst

433
eclass/kernel-install.eclass Normal file
View File

@ -0,0 +1,433 @@
# Copyright 2020-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kernel-install.eclass
# @MAINTAINER:
# Distribution Kernel Project <dist-kernel@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 7
# @BLURB: Installation mechanics for Distribution Kernels
# @DESCRIPTION:
# This eclass provides the logic needed to test and install different
# kinds of Distribution Kernel packages, including both kernels built
# from source and distributed as binaries. The eclass relies on the
# ebuild installing a subset of built kernel tree into
# /usr/src/linux-${PV} containing the kernel image in its standard
# location and System.map.
#
# The eclass exports src_test, pkg_postinst and pkg_postrm.
# Additionally, the inherited mount-boot eclass exports pkg_pretend.
# It also stubs out pkg_preinst and pkg_prerm defined by mount-boot.
# @ECLASS-VARIABLE: KV_LOCALVERSION
# @DEFAULT_UNSET
# @DESCRIPTION:
# A string containing the kernel LOCALVERSION, e.g. '-gentoo'.
# Needs to be set only when installing binary kernels,
# kernel-build.eclass obtains it from kernel config.
if [[ ! ${_KERNEL_INSTALL_ECLASS} ]]; then
case "${EAPI:-0}" in
0|1|2|3|4|5|6)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
inherit dist-kernel-utils mount-boot toolchain-funcs
SLOT="${PV}"
IUSE="+initramfs test"
RESTRICT+="
!test? ( test )
test? ( userpriv )
arm? ( test )
"
# install-DEPEND actually
# note: we need installkernel with initramfs support!
RDEPEND="
|| (
sys-kernel/installkernel-gentoo
sys-kernel/installkernel-systemd-boot
)
initramfs? ( >=sys-kernel/dracut-049-r3 )"
BDEPEND="
test? (
dev-tcltk/expect
sys-apps/coreutils
sys-kernel/dracut
sys-fs/e2fsprogs
amd64? ( app-emulation/qemu[qemu_softmmu_targets_x86_64] )
arm64? ( app-emulation/qemu[qemu_softmmu_targets_aarch64] )
ppc64? ( app-emulation/qemu[qemu_softmmu_targets_ppc64] )
x86? ( app-emulation/qemu[qemu_softmmu_targets_i386] )
)"
# @FUNCTION: kernel-install_update_symlink
# @USAGE: <target> <version>
# @DESCRIPTION:
# Update the kernel source symlink at <target> (full path) with a link
# to <target>-<version> if it's either missing or pointing to
# an older version of this package.
kernel-install_update_symlink() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 2 ]] || die "${FUNCNAME}: invalid arguments"
local target=${1}
local version=${2}
if [[ ! -e ${target} ]]; then
ebegin "Creating ${target} symlink"
ln -f -n -s "${target##*/}-${version}" "${target}"
eend ${?}
else
local symlink_target=$(readlink "${target}")
local symlink_ver=${symlink_target#${target##*/}-}
local updated=
if [[ ${symlink_target} == ${target##*/}-* && \
-z ${symlink_ver//[0-9.]/} ]]
then
local symlink_pkg=${CATEGORY}/${PN}-${symlink_ver}
# if the current target is either being replaced, or still
# installed (probably depclean candidate), update the symlink
if has "${symlink_ver}" ${REPLACING_VERSIONS} ||
has_version -r "~${symlink_pkg}"
then
ebegin "Updating ${target} symlink"
ln -f -n -s "${target##*/}-${version}" "${target}"
eend ${?}
updated=1
fi
fi
if [[ ! ${updated} ]]; then
elog "${target} points at another kernel, leaving it as-is."
elog "Please use 'eselect kernel' to update it when desired."
fi
fi
}
# @FUNCTION: kernel-install_get_qemu_arch
# @DESCRIPTION:
# Get appropriate qemu suffix for the current ${ARCH}.
kernel-install_get_qemu_arch() {
debug-print-function ${FUNCNAME} "${@}"
case ${ARCH} in
amd64)
echo x86_64
;;
x86)
echo i386
;;
arm)
echo arm
;;
arm64)
echo aarch64
;;
ppc64)
echo ppc64
;;
*)
die "${FUNCNAME}: unsupported ARCH=${ARCH}"
;;
esac
}
# @FUNCTION: kernel-install_create_init
# @USAGE: <filename>
# @DESCRIPTION:
# Create minimal /sbin/init
kernel-install_create_init() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 1 ]] || die "${FUNCNAME}: invalid arguments"
[[ -z ${1} ]] && die "${FUNCNAME}: empty argument specified"
local output="${1}"
[[ -f ${output} ]] && die "${FUNCNAME}: ${output} already exists"
cat <<-_EOF_ >"${T}/init.c" || die
#include <stdio.h>
int main() {
printf("Hello, World!\n");
return 0;
}
_EOF_
$(tc-getBUILD_CC) -Os -static "${T}/init.c" -o "${output}" || die
$(tc-getBUILD_STRIP) "${output}" || die
}
# @FUNCTION: kernel-install_create_qemu_image
# @USAGE: <filename>
# @DESCRIPTION:
# Create minimal qemu raw image
kernel-install_create_qemu_image() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 1 ]] || die "${FUNCNAME}: invalid arguments"
[[ -z ${1} ]] && die "${FUNCNAME}: empty argument specified"
local image="${1}"
[[ -f ${image} ]] && die "${FUNCNAME}: ${image} already exists"
local imageroot="${T}/imageroot"
[[ -d ${imageroot} ]] && die "${FUNCNAME}: ${imageroot} already exists"
mkdir "${imageroot}" || die
# some layout needed to pass dracut's usable_root() validation
mkdir -p "${imageroot}"/{bin,dev,etc,lib,proc,root,sbin,sys} || die
touch "${imageroot}/lib/ld-fake.so" || die
kernel-install_create_init "${imageroot}/sbin/init"
# image may be smaller if needed
truncate -s 4M "${image}" || die
mkfs.ext4 -v -d "${imageroot}" -L groot "${image}" || die
}
# @FUNCTION: kernel-install_test
# @USAGE: <version> <image> <modules>
# @DESCRIPTION:
# Test that the kernel can successfully boot a minimal system image
# in qemu. <version> is the kernel version, <image> path to the image,
# <modules> path to module tree.
kernel-install_test() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 3 ]] || die "${FUNCNAME}: invalid arguments"
local version=${1}
local image=${2}
local modules=${3}
local qemu_arch=$(kernel-install_get_qemu_arch)
dracut \
--conf /dev/null \
--confdir /dev/null \
--no-hostonly \
--kmoddir "${modules}" \
"${T}/initrd" "${version}" || die
kernel-install_create_qemu_image "${T}/fs.img"
cd "${T}" || die
local qemu_extra_args=
local qemu_extra_append=
case ${qemu_arch} in
aarch64)
qemu_extra_args="-M virt -cpu cortex-a57 -smp 1"
qemu_extra_append="console=ttyAMA0"
;;
i386|x86_64)
qemu_extra_args="-cpu max"
qemu_extra_append="console=ttyS0,115200n8"
;;
ppc64)
qemu_extra_args="-nodefaults"
;;
*)
:
;;
esac
cat > run.sh <<-EOF || die
#!/bin/sh
exec qemu-system-${qemu_arch} \
${qemu_extra_args} \
-m 512M \
-nographic \
-no-reboot \
-kernel '${image}' \
-initrd '${T}/initrd' \
-serial mon:stdio \
-drive file=fs.img,format=raw,index=0,media=disk \
-append 'root=LABEL=groot ${qemu_extra_append}'
EOF
chmod +x run.sh || die
# TODO: initramfs does not let core finish starting on some systems,
# figure out how to make it better at that
expect - <<-EOF || die "Booting kernel failed"
set timeout 900
spawn ./run.sh
expect {
"terminating on signal" {
send_error "\n* Qemu killed"
exit 1
}
"OS terminated" {
send_error "\n* Qemu terminated OS"
exit 1
}
"Kernel panic" {
send_error "\n* Kernel panic"
exit 1
}
"Entering emergency mode" {
send_error "\n* Initramfs failed to start the system"
exit 1
}
"Hello, World!" {
send_error "\n* Booted successfully"
exit 0
}
timeout {
send_error "\n* Kernel boot timed out"
exit 2
}
}
EOF
}
# @FUNCTION: kernel-install_pkg_pretend
# @DESCRIPTION:
# Check for missing optional dependencies and output warnings.
kernel-install_pkg_pretend() {
debug-print-function ${FUNCNAME} "${@}"
if ! has_version -d sys-kernel/linux-firmware; then
ewarn "sys-kernel/linux-firmware not found installed on your system."
ewarn "This package provides various firmware files that may be needed"
ewarn "for your hardware to work. If in doubt, it is recommended"
ewarn "to pause or abort the build process and install it before"
ewarn "resuming."
if use initramfs; then
elog
elog "If you decide to install linux-firmware later, you can rebuild"
elog "the initramfs via issuing a command equivalent to:"
elog
elog " emerge --config ${CATEGORY}/${PN}:${SLOT}"
fi
fi
}
# @FUNCTION: kernel-install_src_test
# @DESCRIPTION:
# Boilerplate function to remind people to call the tests.
kernel-install_src_test() {
debug-print-function ${FUNCNAME} "${@}"
die "Please redefine src_test() and call kernel-install_test()."
}
# @FUNCTION: kernel-install_pkg_preinst
# @DESCRIPTION:
# Stub out mount-boot.eclass.
kernel-install_pkg_preinst() {
debug-print-function ${FUNCNAME} "${@}"
# (no-op)
}
# @FUNCTION: kernel-install_install_all
# @USAGE: <ver>
# @DESCRIPTION:
# Build an initramfs for the kernel and install the kernel. This is
# called from pkg_postinst() and pkg_config(). <ver> is the full
# kernel version.
kernel-install_install_all() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -eq 1 ]] || die "${FUNCNAME}: invalid arguments"
local ver=${1}
local success=
# not an actual loop but allows error handling with 'break'
while :; do
nonfatal mount-boot_check_status || break
local image_path=$(dist-kernel_get_image_path)
if use initramfs; then
# putting it alongside kernel image as 'initrd' makes
# kernel-install happier
nonfatal dist-kernel_build_initramfs \
"${EROOT}/usr/src/linux-${ver}/${image_path%/*}/initrd" \
"${ver}" || break
fi
nonfatal dist-kernel_install_kernel "${ver}" \
"${EROOT}/usr/src/linux-${ver}/${image_path}" \
"${EROOT}/usr/src/linux-${ver}/System.map" || break
success=1
break
done
if [[ ! ${success} ]]; then
eerror
eerror "The kernel files were copied to disk successfully but the kernel"
eerror "was not deployed successfully. Once you resolve the problems,"
eerror "please run the equivalent of the following command to try again:"
eerror
eerror " emerge --config ${CATEGORY}/${PN}:${SLOT}"
die "Kernel install failed, please fix the problems and run emerge --config ${CATEGORY}/${PN}:${SLOT}"
fi
}
# @FUNCTION: kernel-install_pkg_postinst
# @DESCRIPTION:
# Build an initramfs for the kernel, install it and update
# the /usr/src/linux symlink.
kernel-install_pkg_postinst() {
debug-print-function ${FUNCNAME} "${@}"
local ver="${PV}${KV_LOCALVERSION}"
kernel-install_update_symlink "${EROOT}/usr/src/linux" "${ver}"
if [[ -z ${ROOT} ]]; then
kernel-install_install_all "${ver}"
fi
}
# @FUNCTION: kernel-install_pkg_prerm
# @DESCRIPTION:
# Stub out mount-boot.eclass.
kernel-install_pkg_prerm() {
debug-print-function ${FUNCNAME} "${@}"
# (no-op)
}
# @FUNCTION: kernel-install_pkg_postrm
# @DESCRIPTION:
# Remove the initramfs generated for this kernel, if any. Removal of
# obsolete kernels may be handled here in the future.
kernel-install_pkg_postrm() {
debug-print-function ${FUNCNAME} "${@}"
if [[ -z ${ROOT} ]] && use initramfs; then
local ver="${PV}${KV_LOCALVERSION}"
local image_path=$(dist-kernel_get_image_path)
ebegin "Removing initramfs"
rm -f "${EROOT}/usr/src/linux-${ver}/${image_path%/*}/initrd" &&
find "${EROOT}/usr/src/linux-${ver}" -depth -type d -empty -delete
eend ${?}
fi
}
# @FUNCTION: kernel-install_pkg_config
# @DESCRIPTION:
# Rebuild the initramfs and reinstall the kernel.
kernel-install_pkg_config() {
[[ -z ${ROOT} ]] || die "ROOT!=/ not supported currently"
kernel-install_install_all "${PV}${KV_LOCALVERSION}"
}
_KERNEL_INSTALL_ECLASS=1
fi
EXPORT_FUNCTIONS src_test pkg_preinst pkg_postinst pkg_prerm pkg_postrm
EXPORT_FUNCTIONS pkg_config pkg_pretend

37
eclass/kodi-addon.eclass Normal file
View File

@ -0,0 +1,37 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: kodi-addon.eclass
# @MAINTAINER:
# candrews@gentoo.org
# @SUPPORTED_EAPIS: 4 5 6 7
# @BLURB: Helper for correct building and (importantly) installing Kodi addon packages.
# @DESCRIPTION:
# Provides a src_configure function for correct CMake configuration
case "${EAPI:-0}" in
4|5|6)
inherit cmake-utils multilib
;;
7)
inherit cmake
;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
EXPORT_FUNCTIONS src_configure
# @FUNCTION: kodi-addon_src_configure
# @DESCRIPTION:
# Configure handling for Kodi addons
kodi-addon_src_configure() {
mycmakeargs+=(
-DCMAKE_INSTALL_LIBDIR=${EPREFIX%/}/usr/$(get_libdir)/kodi
)
case ${EAPI} in
4|5|6) cmake-utils_src_configure ;;
7) cmake_src_configure ;;
esac
}

174
eclass/l10n.eclass Normal file
View File

@ -0,0 +1,174 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: l10n.eclass
# @MAINTAINER:
# Ulrich Müller <ulm@gentoo.org>
# @AUTHOR:
# Ben de Groot <yngwin@gentoo.org>
# @BLURB: convenience functions to handle localizations
# @DESCRIPTION:
# The l10n (localization) eclass offers a number of functions to more
# conveniently handle localizations (translations) offered by packages.
# These are meant to prevent code duplication for such boring tasks as
# determining the intersection between the user's set LINGUAS and what
# is offered by the package.
if [[ -z ${_L10N_ECLASS} ]]; then
_L10N_ECLASS=1
# @ECLASS-VARIABLE: PLOCALES
# @DEFAULT_UNSET
# @DESCRIPTION:
# Variable listing the locales for which localizations are offered by
# the package.
#
# Example: PLOCALES="cy de el_GR en_US pt_BR vi zh_CN"
# @ECLASS-VARIABLE: PLOCALE_BACKUP
# @DEFAULT_UNSET
# @DESCRIPTION:
# In some cases the package fails when none of the offered PLOCALES are
# selected by the user. In that case this variable should be set to a
# default locale (usually 'en' or 'en_US') as backup.
#
# Example: PLOCALE_BACKUP="en_US"
# @FUNCTION: l10n_for_each_locale_do
# @USAGE: <function>
# @DESCRIPTION:
# Convenience function for processing localizations. The parameter should
# be a function (defined in the consuming eclass or ebuild) which takes
# an individual localization as (last) parameter.
#
# Example: l10n_for_each_locale_do install_locale
l10n_for_each_locale_do() {
local locs x
locs=$(l10n_get_locales)
for x in ${locs}; do
"${@}" ${x} || die "failed to process enabled ${x} locale"
done
}
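# A minimal callback sketch expanding on the install_locale example above
# (the po/ layout is a hypothetical assumption about the package):
#
# @CODE
# install_locale() {
#	domo "po/${1}.mo"
# }
#
# src_install() {
#	default
#	l10n_for_each_locale_do install_locale
# }
# @CODE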
# @FUNCTION: l10n_for_each_disabled_locale_do
# @USAGE: <function>
# @DESCRIPTION:
# Complementary to l10n_for_each_locale_do, this function will process
# locales that are disabled. This could be used for example to remove
# locales from a Makefile, to prevent them from being built needlessly.
l10n_for_each_disabled_locale_do() {
local locs x
locs=$(l10n_get_locales disabled)
for x in ${locs}; do
"${@}" ${x} || die "failed to process disabled ${x} locale"
done
}
# @FUNCTION: l10n_find_plocales_changes
# @USAGE: <translations dir> <filename pre pattern> <filename post pattern>
# @DESCRIPTION:
# Ebuild maintenance helper function to find changes in package offered
# locales when doing a version bump. This could be added for example to
# src_prepare
#
# Example: l10n_find_plocales_changes "${S}/src/translations" "${PN}_" '.ts'
l10n_find_plocales_changes() {
[[ $# -ne 3 ]] && die "Exactly 3 arguments are needed!"
ebegin "Looking in ${1} for new locales"
pushd "${1}" >/dev/null || die "Cannot access ${1}"
local current= x=
for x in ${2}*${3} ; do
x=${x#"${2}"}
x=${x%"${3}"}
current+="${x} "
done
popd >/dev/null
# RHS will be sorted with single spaces so ensure the LHS is too
# before attempting to compare them for equality. See bug #513242.
# Run them both through the same sorting algorithm so we don't have
# to worry about them being the same.
if [[ "$(printf '%s\n' ${PLOCALES} | LC_ALL=C sort)" != "$(printf '%s\n' ${current} | LC_ALL=C sort)" ]] ; then
eend 1 "There are changes in locales! This ebuild should be updated to:"
eerror "PLOCALES=\"${current%[[:space:]]}\""
return 1
else
eend 0
fi
}
# @FUNCTION: l10n_get_locales
# @USAGE: [disabled]
# @DESCRIPTION:
# Determine which LINGUAS the user has enabled that are offered by the
# package, as listed in PLOCALES, and return them. In case no locales
# are selected, fall back on PLOCALE_BACKUP. When the disabled argument
# is given, return the disabled locales instead of the enabled ones.
l10n_get_locales() {
local loc locs
if [[ -z ${LINGUAS+set} ]]; then
# enable all if unset
locs=${PLOCALES}
else
for loc in ${LINGUAS}; do
has ${loc} ${PLOCALES} && locs+="${loc} "
done
fi
[[ -z ${locs} ]] && locs=${PLOCALE_BACKUP}
if [[ ${1} == disabled ]]; then
local disabled_locs
for loc in ${PLOCALES}; do
has ${loc} ${locs} || disabled_locs+="${loc} "
done
locs=${disabled_locs}
fi
printf "%s" "${locs}"
}
# @FUNCTION: strip-linguas
# @USAGE: [<allow LINGUAS>|<-i|-u> <directories of .po files>]
# @DESCRIPTION:
# Make sure that LINGUAS only contains languages that a package can
# support. The first form allows you to specify a list of LINGUAS.
# The -i builds a list of po files found in all the directories and uses
# the intersection of the lists. The -u builds a list of po files found
# in all the directories and uses the union of the lists.
strip-linguas() {
local ls newls nols
if [[ $1 == "-i" ]] || [[ $1 == "-u" ]] ; then
local op=$1; shift
ls=$(find "$1" -name '*.po' -exec basename {} .po ';'); shift
local d f
for d in "$@" ; do
if [[ ${op} == "-u" ]] ; then
newls=${ls}
else
newls=""
fi
for f in $(find "$d" -name '*.po' -exec basename {} .po ';') ; do
if [[ ${op} == "-i" ]] ; then
has ${f} ${ls} && newls="${newls} ${f}"
else
has ${f} ${ls} || newls="${newls} ${f}"
fi
done
ls=${newls}
done
else
ls="$@"
fi
nols=""
newls=""
for f in ${LINGUAS} ; do
if has ${f} ${ls} ; then
newls="${newls} ${f}"
else
nols="${nols} ${f}"
fi
done
[[ -n ${nols} ]] \
&& einfo "Sorry, but ${PN} does not support the LINGUAS:" ${nols}
export LINGUAS=${newls:1}
}
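# Usage sketch (the directory names are illustrative assumptions): either pass
# an explicit list of supported LINGUAS, or let the function scan .po files:
#
# @CODE
# strip-linguas de fr es
# strip-linguas -i po/
# @CODE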
fi

245
eclass/latex-package.eclass Normal file
View File

@ -0,0 +1,245 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: latex-package.eclass
# @MAINTAINER:
# TeX team <tex@gentoo.org>
# @AUTHOR:
# Matthew Turk <satai@gentoo.org>
# Martin Ehmsen <ehmsen@gentoo.org>
# @SUPPORTED_EAPIS: 7
# @BLURB: An eclass for easy installation of LaTeX packages
# @DESCRIPTION:
# This eClass is designed to be easy to use and implement. The vast majority of
# LaTeX packages will only need to define SRC_URI (and sometimes S) for a
# successful installation. If fonts need to be installed, then the variable
# SUPPLIER must also be defined.
#
# However, those packages that contain subdirectories must process each
# subdirectory individually. For example, a package that contains directories
# DIR1 and DIR2 must call latex-package_src_compile() and
# latex-package_src_install() in each directory, as shown here:
#
# src_compile() {
# cd ${S}
# cd DIR1
# latex-package_src_compile
# cd ..
# cd DIR2
# latex-package_src_compile
# }
#
# src_install() {
# cd ${S}
# cd DIR1
# latex-package_src_install
# cd ..
# cd DIR2
# latex-package_src_install
# }
#
# The eClass automatically takes care of rehashing TeX's cache (ls-lR) after
# installation and after removal, as well as creating final documentation from
# TeX files that come with the source. Note that we break TeX layout standards
# by placing documentation in /usr/share/doc/${PN}
#
# For examples of basic installations, check out dev-tex/aastex and
# dev-tex/leaflet .
#
# NOTE: The CTAN "directory grab" function creates files with different MD5
# signatures EVERY TIME. For this reason, if you are grabbing from the CTAN,
# you must either grab each file individually, or find a place to mirror an
# archive of them. (iBiblio)
if [[ -z ${_LATEX_PACKAGE_ECLASS} ]]; then
_LATEX_PACKAGE_ECLASS=1
RDEPEND="virtual/latex-base"
DEPEND="${RDEPEND}
>=sys-apps/texinfo-4.2-r5"
case ${EAPI:-0} in
[0-6])
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}" ;;
7) ;;
*) die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}" ;;
esac
HOMEPAGE="http://www.tug.org/"
TEXMF="/usr/share/texmf-site"
# @ECLASS-VARIABLE: SUPPLIER
# @DESCRIPTION:
# This refers to the font supplier; it should be overridden (see eclass
# DESCRIPTION above)
SUPPLIER="misc"
# @ECLASS-VARIABLE: LATEX_DOC_ARGUMENTS
# @DESCRIPTION:
# When compiling documentation (.tex/.dtx), this variable will be passed
# to pdflatex as an additional argument (e.g. -shell-escape). This variable
# must be set after inherit, as it gets automatically cleared otherwise.
LATEX_DOC_ARGUMENTS=""
# @FUNCTION: latex-package_src_doinstall
# @USAGE: [ module ]
# @DESCRIPTION:
# [module] can be one or more of: sh, sty, cls, fd, clo, def, cfg, dvi, ps, pdf,
# tex, dtx, tfm, vf, afm, pfb, ttf, bst, styles, doc, fonts, bin, or all.
# If [module] is not given, all is assumed.
# It installs the files found in the current directory to the standard locations
# for a TeX installation
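# An illustrative call, assuming an ebuild that only ships style files and
# documentation:
# @CODE
# src_install() {
#     latex-package_src_doinstall styles doc
#     einstalldocs
# }
# @CODE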
latex-package_src_doinstall() {
debug-print function $FUNCNAME $*
# Avoid generating font cache outside of the sandbox
export VARTEXFONTS="${T}/fonts"
# This actually follows the directions for a "single-user" system
# at http://www.ctan.org/installationadvice/ modified for gentoo.
[[ -z ${1} ]] && latex-package_src_install all
while [[ ${1} ]]; do
case ${1} in
"sh")
while IFS= read -r -d '' i; do
dobin ${i}
done < <(find -maxdepth 1 -type f -name "*.${1}" -print0)
;;
"sty" | "cls" | "fd" | "clo" | "def" | "cfg")
while IFS= read -r -d '' i; do
insinto ${TEXMF}/tex/latex/${PN}
doins ${i}
done < <(find -maxdepth 1 -type f -name "*.${1}" -print0)
;;
"dvi" | "ps" | "pdf")
while IFS= read -r -d '' i; do
insinto /usr/share/doc/${PF}
doins ${i}
dosym /usr/share/doc/${PF}/$(basename ${i}) ${TEXMF}/doc/latex/${PN}/${i}
docompress -x /usr/share/doc/${PF}/$(basename ${i})
done < <(find -maxdepth 1 -type f -name "*.${1}" -print0)
;;
"tex" | "dtx")
if ! in_iuse doc || use doc ; then
while IFS= read -r -d '' i; do
[[ -n ${LATEX_PACKAGE_SKIP} ]] &&
has ${i##*/} ${LATEX_PACKAGE_SKIP} &&
continue
einfo "Making documentation: ${i}"
# some macros need compiler called twice, do it here.
set -- pdflatex ${LATEX_DOC_ARGUMENTS} --halt-on-error --interaction=nonstopmode ${i}
if "${@}"; then
"${@}"
else
einfo "pdflatex failed, trying texi2dvi"
texi2dvi -q -c --language=latex ${i} || die
fi
done < <(find -maxdepth 1 -type f -name "*.${1}" -print0)
fi
;;
"tfm" | "vf" | "afm")
while IFS= read -r -d '' i; do
insinto ${TEXMF}/fonts/${1}/${SUPPLIER}/${PN}
doins ${i}
done < <(find -maxdepth 1 -type f -name "*.${1}" -print0)
;;
"pfb")
while IFS= read -r -d '' i; do
insinto ${TEXMF}/fonts/type1/${SUPPLIER}/${PN}
doins ${i}
done < <(find -maxdepth 1 -type f -name "*.pfb" -print0)
;;
"ttf")
while IFS= read -r -d '' i; do
insinto ${TEXMF}/fonts/truetype/${SUPPLIER}/${PN}
doins ${i}
done < <(find -maxdepth 1 -type f -name "*.ttf" -print0)
;;
"bst")
while IFS= read -r -d '' i; do
insinto ${TEXMF}/bibtex/bst/${PN}
doins ${i}
done < <(find -maxdepth 1 -type f -name "*.bst" -print0)
;;
"styles")
latex-package_src_doinstall sty cls fd clo def cfg bst
;;
"doc")
latex-package_src_doinstall tex dtx dvi ps pdf
;;
"fonts")
latex-package_src_doinstall tfm vf afm pfb ttf
;;
"bin")
latex-package_src_doinstall sh
;;
"all")
latex-package_src_doinstall styles fonts bin doc
;;
esac
shift
done
}
# @FUNCTION: latex-package_src_compile
# @DESCRIPTION:
# Calls latex for each *.ins in the current directory in order to generate the
# relevant files that will be installed
latex-package_src_compile() {
debug-print function $FUNCNAME $*
while IFS= read -r -d '' i; do
einfo "Extracting from ${i}"
latex --halt-on-error --interaction=nonstopmode ${i} || die
done < <(find -maxdepth 1 -type f -name "*.ins" -print0)
}
# @FUNCTION: latex-package_src_install
# @DESCRIPTION:
# Installs the package
latex-package_src_install() {
debug-print function $FUNCNAME $*
latex-package_src_doinstall all
einstalldocs
}
# @FUNCTION: latex-package_pkg_postinst
# @DESCRIPTION:
# Calls latex-package_rehash to ensure the TeX installation is consistent with
# the kpathsea database
latex-package_pkg_postinst() {
debug-print function $FUNCNAME $*
latex-package_rehash
}
# @FUNCTION: latex-package_pkg_postrm
# @DESCRIPTION:
# Calls latex-package_rehash to ensure the TeX installation is consistent with
# the kpathsea database
latex-package_pkg_postrm() {
debug-print function $FUNCNAME $*
latex-package_rehash
}
# @FUNCTION: latex-package_rehash
# @DESCRIPTION:
# Rehashes the kpathsea database, according to the current TeX installation
latex-package_rehash() {
debug-print function $FUNCNAME $*
texmf-update
}
EXPORT_FUNCTIONS src_compile src_install pkg_postinst pkg_postrm
fi

210
eclass/libretro-core.eclass Normal file
View File

@ -0,0 +1,210 @@
# Copyright 2018-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: libretro-core.eclass
# @MAINTAINER:
# candrews@gentoo.org
# @AUTHOR:
# Cecil Curry <leycec@gmail.com>
# Craig Andrews <candrews@gentoo.org>
# @SUPPORTED_EAPIS: 6 7
# @BLURB: Simplify libretro core ebuilds
# @DESCRIPTION:
# The libretro eclass is designed to streamline the construction of
# ebuilds for Libretro core ebuilds.
#
# Libretro cores can be found under https://github.com/libretro/
#
# They all use the same basic make based build system, are located
# in the same github account, and do not release named or numbered
# versions (so ebuild versions are keyed to git commits).
# This eclass covers those commonalities reducing much duplication
# between the ebuilds.
# @EXAMPLE:
# @CODE
# EAPI=7
#
# LIBRETRO_CORE_NAME="2048"
# LIBRETRO_COMMIT_SHA="45655d3662e4cbcd8afb28e2ee3f5494a75888de"
# KEYWORDS="~amd64 ~x86"
# inherit libretro-core
#
# DESCRIPTION="Port of 2048 puzzle game to the libretro API"
# LICENSE="Unlicense"
# SLOT="0"
# @CODE
if [[ -z ${_LIBRETRO_CORE_ECLASS} ]]; then
_LIBRETRO_CORE_ECLASS=1
IUSE="debug"
# @ECLASS-VARIABLE: LIBRETRO_CORE_NAME
# @REQUIRED
# @DESCRIPTION:
# Name of this Libretro core. The libretro-core_src_install() phase function
# will install the shared library "${S}/${LIBRETRO_CORE_NAME}_libretro.so" as a
# Libretro core. Defaults to the name of the current package with the
# "libretro-" prefix excluded and hyphens replaced with underscores
# (e.g. genesis_plus_gx for libretro-genesis-plus-gx)
if [[ -z "${LIBRETRO_CORE_NAME}" ]]; then
LIBRETRO_CORE_NAME=${PN#libretro-}
LIBRETRO_CORE_NAME=${LIBRETRO_CORE_NAME//-/_}
fi
# @ECLASS-VARIABLE: LIBRETRO_COMMIT_SHA
# @DESCRIPTION:
# Commit SHA used for SRC_URI; the eclass will die if this is not set in <9999 ebuilds.
# Needs to be set before inherit.
# @ECLASS-VARIABLE: LIBRETRO_REPO_NAME
# @REQUIRED
# @DESCRIPTION:
# Contains the real repo name of the core formatted as "repouser/reponame".
# Needs to be set before inherit. Otherwise defaults to "libretro/libretro-${LIBRETRO_CORE_NAME}".
: ${LIBRETRO_REPO_NAME:="libretro/libretro-${LIBRETRO_CORE_NAME}"}
: ${HOMEPAGE:="https://github.com/${LIBRETRO_REPO_NAME}"}
if [[ ${PV} == *9999 ]]; then
: ${EGIT_REPO_URI:="https://github.com/${LIBRETRO_REPO_NAME}.git"}
inherit git-r3
else
[[ -z "${LIBRETRO_COMMIT_SHA}" ]] && die "LIBRETRO_COMMIT_SHA must be set before inherit."
S="${WORKDIR}/${LIBRETRO_REPO_NAME##*/}-${LIBRETRO_COMMIT_SHA}"
: ${SRC_URI:="https://github.com/${LIBRETRO_REPO_NAME}/archive/${LIBRETRO_COMMIT_SHA}.tar.gz -> ${P}.tar.gz"}
fi
inherit flag-o-matic toolchain-funcs
case "${EAPI:-0}" in
6|7)
EXPORT_FUNCTIONS src_unpack src_prepare src_compile src_install
;;
*)
die "EAPI=${EAPI} is not supported" ;;
esac
# @FUNCTION: libretro-core_src_unpack
# @DESCRIPTION:
# The libretro-core src_unpack function which is exported.
#
# This function retrieves the remote Libretro core info files.
libretro-core_src_unpack() {
# If this is a live ebuild, retrieve this core's remote repository.
if [[ ${PV} == *9999 ]]; then
git-r3_src_unpack
# Add used commit SHA for version information, the above could also work.
LIBRETRO_COMMIT_SHA=$(git -C "${WORKDIR}/${P}" rev-parse HEAD)
# Else, unpack this core's local tarball.
else
default_src_unpack
fi
}
# @FUNCTION: libretro-core_src_prepare
# @DESCRIPTION:
# The libretro-core src_prepare function which is exported.
#
# This function prepares the source by making custom modifications.
libretro-core_src_prepare() {
default_src_prepare
# Populate COMMIT for GIT_VERSION
local custom_libretro_commit_sha="\" ${LIBRETRO_COMMIT_SHA:0:7}\""
local makefile
local flags_modified=0
local shopt_saved=$(shopt -p nullglob)
shopt -s nullglob
for makefile in "${S}"/[Mm]akefile* "${S}"/target-libretro/[Mm]akefile*; do
# * Convert CRLF to LF
# * Expand *FLAGS to prevent potential self-references
# * Where LDFLAGS directly define the link version
# script append LDFLAGS and LIBS
# * Where SHARED is used to provide shared linking
# flags ensure final link command includes LDFLAGS
# and LIBS
# * Always use $(CFLAGS) when calling $(CC)
# * Add short-rev to Makefile
sed \
-e 's/\r$//g' \
-e "/flags.*=/s|-O[[:digit:]]|${CFLAGS}|g" \
-e "/CFLAGS.*=/s|-O[[:digit:]]|${CFLAGS}|g" \
-e "/CXXFLAGS.*=/s|-O[[:digit:]]|${CXXFLAGS}|g" \
-e "/.*,--version-script=.*/s|$| ${LDFLAGS} ${LIBS}|g" \
-e "/\$(CC)/s|\(\$(SHARED)\)|\1 ${LDFLAGS} ${LIBS}|" \
-e 's|\(\$(CC)\)|\1 \$(CFLAGS)|g' \
-e "s/GIT_VERSION\s.=.*$/GIT_VERSION=${custom_libretro_commit_sha}/g" \
-i "${makefile}" || die "Failed to use custom cflags in ${makefile}"
done
${shopt_saved}
export OPTFLAGS="${CFLAGS}"
}
# @VARIABLE: myemakeargs
# @DEFAULT_UNSET
# @DESCRIPTION:
# Optional emake arguments as a bash array. Should be defined before calling
# src_compile.
# @CODE
# src_compile() {
# local myemakeargs=(
# $(usex neon "HAVE_NEON=1" "")
# )
# libretro-core_src_compile
# }
# @CODE
# @FUNCTION: libretro-core_src_compile
# @DESCRIPTION:
# The libretro-core src_compile function which is exported.
#
# This function compiles the shared library for this Libretro core.
libretro-core_src_compile() {
# most (if not all) libretro makefiles use DEBUG=1
# to enable additional debug features.
emake CC=$(tc-getCC) CXX=$(tc-getCXX) \
$(usex debug "DEBUG=1" "") "${myemakeargs[@]}" \
$([[ -f makefile.libretro ]] && echo '-f makefile.libretro') \
$([[ -f Makefile.libretro ]] && echo '-f Makefile.libretro')
}
# @VARIABLE: LIBRETRO_CORE_LIB_FILE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Absolute path of this Libretro core's shared library. Should be defined
# before calling src_install.
# @CODE
# src_install() {
# local LIBRETRO_CORE_LIB_FILE="${S}/somecore_libretro.so"
#
# libretro-core_src_install
# }
# @CODE
# @FUNCTION: libretro-core_src_install
# @DESCRIPTION:
# The libretro-core src_install function which is exported.
#
# This function installs the shared library for this Libretro core.
libretro-core_src_install() {
local LIBRETRO_CORE_LIB_FILE=${LIBRETRO_CORE_LIB_FILE:-"${S}/${LIBRETRO_CORE_NAME}_libretro.so"}
# Absolute path of the directory containing Libretro shared libraries.
local libretro_lib_dir="/usr/$(get_libdir)/libretro"
# If this core's shared library exists, install that.
if [[ -f "${LIBRETRO_CORE_LIB_FILE}" ]]; then
exeinto "${libretro_lib_dir}"
doexe "${LIBRETRO_CORE_LIB_FILE}"
else
# Basename of this library.
local lib_basename="${LIBRETRO_CORE_LIB_FILE##*/}"
# Absolute path to which this library was installed.
local lib_file_target="${ED}${libretro_lib_dir}/${lib_basename}"
# If this library was *NOT* installed, fail.
[[ -f "${lib_file_target}" ]] ||
die "Libretro core shared library \"${lib_file_target}\" not installed."
fi
}
fi # end _LIBRETRO_CORE_ECLASS guard

49
eclass/libtool.eclass Normal file
View File

@ -0,0 +1,49 @@
# Copyright 1999-2018 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: libtool.eclass
# @MAINTAINER:
# base-system@gentoo.org
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6 7
# @BLURB: quickly update bundled libtool code
# @DESCRIPTION:
# This eclass patches ltmain.sh distributed with libtoolized packages with the
# relink and portage patch among others
#
# Note, this eclass does not require libtool as it only applies patches to
# generated libtool files. We do not run the libtoolize program because that
# requires a regeneration of the main autotool files in order to work properly.
if [[ -z ${_LIBTOOL_ECLASS} ]]; then
_LIBTOOL_ECLASS=1
case ${EAPI:-0} in
0|1|2|3|4|5|6) DEPEND=">=app-portage/elt-patches-20170815" ;;
7) BDEPEND=">=app-portage/elt-patches-20170815" ;;
*) die "${ECLASS}: EAPI ${EAPI} not supported" ;;
esac
inherit toolchain-funcs
# @FUNCTION: elibtoolize
# @USAGE: [dirs] [--portage] [--reverse-deps] [--patch-only] [--remove-internal-dep=xxx] [--shallow] [--no-uclibc]
# @DESCRIPTION:
# Apply a smorgasbord of patches to bundled libtool files. This function
# should always be safe to run. If no directories are specified, then
# ${S} will be searched for appropriate files.
#
# If the --shallow option is used, then only ${S}/ltmain.sh will be patched.
#
# The other options should be avoided in general unless you know what's going on.
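# A minimal sketch of typical use (assuming an autotools-based package):
# @CODE
# src_prepare() {
#     default
#     elibtoolize    # patch the bundled ltmain.sh found under ${S}
# }
# @CODE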
elibtoolize() {
type -P eltpatch &>/dev/null || die "eltpatch not found; is app-portage/elt-patches installed?"
ELT_LOGDIR=${T} \
LD=$(tc-getLD) \
eltpatch "${@}" || die "eltpatch failed"
}
uclibctoolize() { die "Use elibtoolize"; }
darwintoolize() { die "Use elibtoolize"; }
fi

963
eclass/linux-info.eclass Normal file
View File

@ -0,0 +1,963 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: linux-info.eclass
# @MAINTAINER:
# kernel@gentoo.org
# @AUTHOR:
# Original author: John Mylchreest <johnm@gentoo.org>
# @BLURB: eclass used for accessing kernel related information
# @DESCRIPTION:
# This eclass is used as a central eclass for accessing kernel
# related information for source or binary already installed.
# It is vital for linux-mod.eclass to function correctly, and is split
# out so that any ebuild behaviour "templates" are abstracted out
# using additional eclasses.
#
# "kernel config" in this file means:
# The .config of the currently installed sources is used as the first
# preference, with a fall-back to bundled config (/proc/config.gz) if available.
#
# Before using any of the config-handling functions in this eclass, you must
# ensure that one of the following functions has been called (in order of
# preference), otherwise you will get bugs like #364041:
# linux-info_pkg_setup
# linux-info_get_any_version
# get_version
# get_running_version
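# A minimal sketch of an ebuild using this eclass (the CONFIG_CHECK value is
# only illustrative):
# @CODE
# inherit linux-info
# CONFIG_CHECK="~INOTIFY_USER"
# pkg_setup() {
#     linux-info_pkg_setup
# }
# @CODE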
# A couple of env vars are available to affect the usage of this eclass.
# These are as follows:
# @ECLASS-VARIABLE: KERNEL_DIR
# @DESCRIPTION:
# A string containing the directory of the target kernel sources. The default value is
# "/usr/src/linux"
# @ECLASS-VARIABLE: CONFIG_CHECK
# @DESCRIPTION:
# A string containing a list of .config options to check for before
# proceeding with the install.
#
# e.g.: CONFIG_CHECK="MTRR"
#
# You can also check that an option doesn't exist by
# prepending it with an exclamation mark (!).
#
# e.g.: CONFIG_CHECK="!MTRR"
#
# To simply warn about a missing option, prepend a '~'.
# It may be combined with '!'.
#
# In general, most checks should be non-fatal. The only time fatal checks should
# be used is for building kernel modules or cases where a compile will fail
# without the option.
#
# This is to allow usage of binary kernels, and minimal systems without kernel
# sources.
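# For example, a kernel-module ebuild might combine fatal and non-fatal checks
# (the option names below are only illustrative):
# @CODE
# CONFIG_CHECK="MODULES !TRIM_UNUSED_KSYMS ~SYSVIPC"
# @CODE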
# @ECLASS-VARIABLE: ERROR_<CFG>
# @DESCRIPTION:
# A string containing the error message to display when the check against CONFIG_CHECK
# fails. <CFG> should reference the appropriate option used in CONFIG_CHECK.
#
# e.g.: ERROR_MTRR="MTRR exists in the .config but shouldn't!!"
# @ECLASS-VARIABLE: KBUILD_OUTPUT
# @DESCRIPTION:
# A string passed on commandline, or set from the kernel makefile. It contains the directory
# which is to be used as the kernel object directory.
# There are also a couple of variables which are set by this, and shouldn't be
# set by hand. These are as follows:
# @ECLASS-VARIABLE: KV_FULL
# @DESCRIPTION:
# A read-only variable. It's a string containing the full kernel version. ie: 2.6.9-gentoo-johnm-r1
# @ECLASS-VARIABLE: KV_MAJOR
# @DESCRIPTION:
# A read-only variable. It's an integer containing the kernel major version. ie: 2
# @ECLASS-VARIABLE: KV_MINOR
# @DESCRIPTION:
# A read-only variable. It's an integer containing the kernel minor version. ie: 6
# @ECLASS-VARIABLE: KV_PATCH
# @DESCRIPTION:
# A read-only variable. It's an integer containing the kernel patch version. ie: 9
# @ECLASS-VARIABLE: KV_EXTRA
# @DESCRIPTION:
# A read-only variable. It's a string containing the kernel EXTRAVERSION. ie: -gentoo
# @ECLASS-VARIABLE: KV_LOCAL
# @DESCRIPTION:
# A read-only variable. It's a string containing the kernel LOCALVERSION concatenation. ie: -johnm
# @ECLASS-VARIABLE: KV_DIR
# @DESCRIPTION:
# A read-only variable. It's a string containing the kernel source directory, will be null if
# KERNEL_DIR is invalid.
# @ECLASS-VARIABLE: KV_OUT_DIR
# @DESCRIPTION:
# A read-only variable. It's a string containing the kernel object directory, will be KV_DIR unless
# KBUILD_OUTPUT is used. This should be used for referencing .config.
# And inherit toolchain-funcs to handle all the weirdness with cross-compiling.
inherit toolchain-funcs
[[ ${EAPI:-0} == [0123456] ]] && inherit eapi7-ver
EXPORT_FUNCTIONS pkg_setup
IUSE="kernel_linux"
# Overwritable environment Var's
# ---------------------------------------
KERNEL_DIR="${KERNEL_DIR:-${ROOT%/}/usr/src/linux}"
# Bug fixes
# fix to bug #75034
case ${ARCH} in
ppc) BUILD_FIXES="${BUILD_FIXES} TOUT=${T}/.tmp_gas_check";;
ppc64) BUILD_FIXES="${BUILD_FIXES} TOUT=${T}/.tmp_gas_check";;
esac
# @FUNCTION: set_arch_to_kernel
# @DESCRIPTION:
# Set the env ARCH to match what the kernel expects.
set_arch_to_kernel() { export ARCH=$(tc-arch-kernel); }
# @FUNCTION: set_arch_to_portage
# @DESCRIPTION:
# Set the env ARCH to match what portage expects.
set_arch_to_portage() { export ARCH=$(tc-arch); }
# qeinfo "Message"
# -------------------
# qeinfo is a quiet einfo call when EBUILD_PHASE
# should not have visible output.
qout() {
local outputmsg type
type=${1}
shift
outputmsg="${@}"
case "${EBUILD_PHASE}" in
depend) unset outputmsg;;
clean) unset outputmsg;;
preinst) unset outputmsg;;
esac
[ -n "${outputmsg}" ] && ${type} "${outputmsg}"
}
qeinfo() { qout einfo "${@}" ; }
qewarn() { qout ewarn "${@}" ; }
qeerror() { qout eerror "${@}" ; }
# File Functions
# ---------------------------------------
# @FUNCTION: getfilevar
# @USAGE: <variable> <configfile>
# @RETURN: the value of the variable
# @DESCRIPTION:
# It detects the value of the variable defined in the file configfile. This is
# done by including the configfile, and printing the variable with Make.
# It WILL break if your makefile has missing dependencies!
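# A minimal sketch (the path assumes a standard kernel source tree):
# @CODE
# kernel_version=$(getfilevar VERSION "${KERNEL_DIR}/Makefile")
# @CODE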
getfilevar() {
local ERROR basefname basedname myARCH="${ARCH}"
ERROR=0
[ -z "${1}" ] && ERROR=1
[ ! -f "${2}" ] && ERROR=1
if [ "${ERROR}" = 1 ]
then
echo -e "\n"
eerror "getfilevar requires 2 variables, with the second a valid file."
eerror " getfilevar <VARIABLE> <CONFIGFILE>"
else
basefname="$(basename ${2})"
basedname="$(dirname ${2})"
unset ARCH
# We use nonfatal because we want the caller to take care of things #373151
[[ ${EAPI:-0} == [0123] ]] && nonfatal() { "$@"; }
echo -e "e:\\n\\t@echo \$(${1})\\ninclude ${basefname}" | \
nonfatal emake -C "${basedname}" M="${T}" ${BUILD_FIXES} -s -f - 2>/dev/null
ARCH=${myARCH}
fi
}
# @FUNCTION: getfilevar_noexec
# @USAGE: <variable> <configfile>
# @RETURN: the value of the variable
# @DESCRIPTION:
# It detects the value of the variable defined in the file configfile.
# This is done with sed matching an expression only. If the variable's value is
# defined in terms of other make variables, it will not be expanded and you will
# run into problems. See getfilevar for those cases.
getfilevar_noexec() {
local ERROR basefname basedname mycat myARCH="${ARCH}"
ERROR=0
mycat='cat'
[ -z "${1}" ] && ERROR=1
[ ! -f "${2}" ] && ERROR=1
[ "${2%.gz}" != "${2}" ] && mycat='zcat'
if [ "${ERROR}" = 1 ]
then
echo -e "\n"
eerror "getfilevar_noexec requires 2 variables, with the second a valid file."
eerror " getfilevar_noexec <VARIABLE> <CONFIGFILE>"
else
${mycat} "${2}" | \
sed -n \
-e "/^[[:space:]]*${1}[[:space:]]*:\\?=[[:space:]]*\(.*\)\$/{
s,^[^=]*[[:space:]]*=[[:space:]]*,,g ;
s,[[:space:]]*\$,,g ;
p
}"
fi
}
# @ECLASS-VARIABLE: _LINUX_CONFIG_EXISTS_DONE
# @INTERNAL
# @DESCRIPTION:
# This is only set if one of the linux_config_*exists functions has been called.
# We use it for a QA warning that the check for a config has not been performed,
# as linux_chkconfig* in non-legacy mode WILL return an undefined value if no
# config is available at all.
_LINUX_CONFIG_EXISTS_DONE=
linux_config_qa_check() {
local f="$1"
if [ -z "${_LINUX_CONFIG_EXISTS_DONE}" ]; then
ewarn "QA: You called $f before any linux_config_exists!"
ewarn "QA: The return value of $f will NOT guaranteed later!"
fi
if ! use kernel_linux; then
die "$f called on non-Linux system, please fix the ebuild"
fi
}
# @FUNCTION: linux_config_src_exists
# @RETURN: true or false
# @DESCRIPTION:
# It returns true if .config exists in a build directory otherwise false
linux_config_src_exists() {
export _LINUX_CONFIG_EXISTS_DONE=1
use kernel_linux && [[ -n ${KV_OUT_DIR} && -s ${KV_OUT_DIR}/.config ]]
}
# @FUNCTION: linux_config_bin_exists
# @RETURN: true or false
# @DESCRIPTION:
# It returns true if .config exists in /proc, otherwise false
linux_config_bin_exists() {
export _LINUX_CONFIG_EXISTS_DONE=1
use kernel_linux && [[ -s /proc/config.gz ]]
}
# @FUNCTION: linux_config_exists
# @RETURN: true or false
# @DESCRIPTION:
# It returns true if .config exists otherwise false
#
# This function MUST be checked before using any of the linux_chkconfig_*
# functions.
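# Typical usage sketch (the option checked is only an example):
# @CODE
# if linux_config_exists && linux_chkconfig_present EXT4_FS; then
#     einfo "ext4 support detected in the kernel config"
# fi
# @CODE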
linux_config_exists() {
linux_config_src_exists || linux_config_bin_exists
}
# @FUNCTION: linux_config_path
# @DESCRIPTION:
# Echo the name of the config file to use. If none are found,
# then return false.
linux_config_path() {
if linux_config_src_exists; then
echo "${KV_OUT_DIR}/.config"
elif linux_config_bin_exists; then
echo "/proc/config.gz"
else
return 1
fi
}
# @FUNCTION: require_configured_kernel
# @DESCRIPTION:
# This function verifies that the current kernel is configured (it checks against the existence of .config)
# otherwise it dies.
require_configured_kernel() {
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
if ! linux_config_src_exists; then
qeerror "Could not find a usable .config in the kernel source directory."
qeerror "Please ensure that ${KERNEL_DIR} points to a configured set of Linux sources."
qeerror "If you are using KBUILD_OUTPUT, please set the environment var so that"
qeerror "it points to the necessary object directory so that it might find .config."
die "Kernel not configured; no .config found in ${KV_OUT_DIR}"
fi
get_version || die "Unable to determine configured kernel version"
}
# @FUNCTION: linux_chkconfig_present
# @USAGE: <option>
# @RETURN: true or false
# @DESCRIPTION:
# It checks that CONFIG_<option>=y or CONFIG_<option>=m is present in the current kernel .config
# If linux_config_exists returns false, the results of this are UNDEFINED. You
# MUST call linux_config_exists first.
linux_chkconfig_present() {
linux_config_qa_check linux_chkconfig_present
[[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == [my] ]]
}
# @FUNCTION: linux_chkconfig_module
# @USAGE: <option>
# @RETURN: true or false
# @DESCRIPTION:
# It checks that CONFIG_<option>=m is present in the current kernel .config
# If linux_config_exists returns false, the results of this are UNDEFINED. You
# MUST call linux_config_exists first.
linux_chkconfig_module() {
linux_config_qa_check linux_chkconfig_module
[[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == m ]]
}
# @FUNCTION: linux_chkconfig_builtin
# @USAGE: <option>
# @RETURN: true or false
# @DESCRIPTION:
# It checks that CONFIG_<option>=y is present in the current kernel .config
# If linux_config_exists returns false, the results of this are UNDEFINED. You
# MUST call linux_config_exists first.
linux_chkconfig_builtin() {
linux_config_qa_check linux_chkconfig_builtin
[[ $(getfilevar_noexec "CONFIG_$1" "$(linux_config_path)") == y ]]
}
# @FUNCTION: linux_chkconfig_string
# @USAGE: <option>
# @RETURN: CONFIG_<option>
# @DESCRIPTION:
# It prints the CONFIG_<option> value of the current kernel .config (it requires a configured kernel).
# If linux_config_exists returns false, the results of this are UNDEFINED. You
# MUST call linux_config_exists first.
linux_chkconfig_string() {
linux_config_qa_check linux_chkconfig_string
getfilevar_noexec "CONFIG_$1" "$(linux_config_path)"
}
# Versioning Functions
# ---------------------------------------
# @FUNCTION: kernel_is
# @USAGE: [-lt -gt -le -ge -eq] <major_number> [minor_number patch_number]
# @RETURN: true or false
# @DESCRIPTION:
# It returns true when the current kernel version satisfies the comparison against the passed version.
# -eq is the default comparison.
#
# @CODE
# For Example where KV = 2.6.9
# kernel_is 2 4 returns false
# kernel_is 2 returns true
# kernel_is 2 6 returns true
# kernel_is 2 6 8 returns false
# kernel_is 2 6 9 returns true
# @CODE
# Note: duplicated in kernel-2.eclass
kernel_is() {
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
# if we haven't determined the version yet, we need to.
linux-info_get_any_version
# Now we can continue
local operator test value
case ${1#-} in
lt) operator="-lt"; shift;;
gt) operator="-gt"; shift;;
le) operator="-le"; shift;;
ge) operator="-ge"; shift;;
eq) operator="-eq"; shift;;
*) operator="-eq";;
esac
[[ $# -gt 3 ]] && die "Error in kernel_is(): too many parameters"
: $(( test = (KV_MAJOR << 16) + (KV_MINOR << 8) + KV_PATCH ))
: $(( value = (${1:-${KV_MAJOR}} << 16) + (${2:-${KV_MINOR}} << 8) + ${3:-${KV_PATCH}} ))
[ ${test} ${operator} ${value} ]
}
get_localversion() {
local lv_list i x
local shopt_save=$(shopt -p nullglob)
shopt -s nullglob
local files=( ${1}/localversion* )
${shopt_save}
# ignore files with ~ in it.
for i in "${files[@]}"; do
[[ -n ${i//*~*} ]] && lv_list="${lv_list} ${i}"
done
for i in ${lv_list}; do
x="${x}$(<${i})"
done
x=${x/ /}
echo ${x}
}
# Check if the Makefile is valid for direct parsing.
# Check status results:
# - PASS, use 'getfilevar' to extract values
# - FAIL, use 'getfilevar_noexec' to extract values
# The check may fail if:
# - make is not present
# - corruption exists in the kernel makefile
get_makefile_extract_function() {
local a='' b='' mkfunc='getfilevar'
a="$(getfilevar VERSION ${KERNEL_MAKEFILE})"
b="$(getfilevar_noexec VERSION ${KERNEL_MAKEFILE})"
[[ "${a}" != "${b}" ]] && mkfunc='getfilevar_noexec'
echo "${mkfunc}"
}
# internal variable, so we know to only print the warning once
get_version_warning_done=
# @FUNCTION: get_version
# @DESCRIPTION:
# It gets the version of the kernel inside KERNEL_DIR and populates the KV_FULL variable
# (if KV_FULL is already set it does nothing).
#
# The kernel version variables (KV_MAJOR, KV_MINOR, KV_PATCH, KV_EXTRA and KV_LOCAL) are also set.
#
# The KV_DIR is set using the KERNEL_DIR env var, the KV_DIR_OUT is set using a valid
# KBUILD_OUTPUT (in a decreasing priority list, we look for the env var, makefile var or the
# symlink /lib/modules/${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}/build).
get_version() {
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
local tmplocal
# no need to execute this twice assuming KV_FULL is populated.
# we can force by unsetting KV_FULL
[ -n "${KV_FULL}" ] && return 0
# if we don't know KV_FULL, then we need to.
# make sure KV_DIR isn't set since we need to work it out via KERNEL_DIR
unset KV_DIR
# KV_DIR will contain the full path to the sources directory we should use
[ -z "${get_version_warning_done}" ] && \
qeinfo "Determining the location of the kernel source code"
[ -d "${KERNEL_DIR}" ] && KV_DIR="${KERNEL_DIR}"
if [ -z "${KV_DIR}" ]
then
if [ -z "${get_version_warning_done}" ]; then
get_version_warning_done=1
qewarn "Unable to find kernel sources at ${KERNEL_DIR}"
#qeinfo "This package requires Linux sources."
if [ "${KERNEL_DIR}" == "/usr/src/linux" ] ; then
qeinfo "Please make sure that ${KERNEL_DIR} points at your running kernel, "
qeinfo "(or the kernel you wish to build against)."
qeinfo "Alternatively, set the KERNEL_DIR environment variable to the kernel sources location"
else
qeinfo "Please ensure that the KERNEL_DIR environment variable points at full Linux sources of the kernel you wish to compile against."
fi
fi
return 1
fi
# See if the kernel dir is actually an output dir. #454294
if [ -z "${KBUILD_OUTPUT}" -a -L "${KERNEL_DIR}/source" ]; then
KBUILD_OUTPUT=${KERNEL_DIR}
KERNEL_DIR=$(readlink -f "${KERNEL_DIR}/source")
KV_DIR=${KERNEL_DIR}
fi
if [ -z "${get_version_warning_done}" ]; then
qeinfo "Found kernel source directory:"
qeinfo " ${KV_DIR}"
fi
if [ ! -s "${KV_DIR}/Makefile" ]
then
if [ -z "${get_version_warning_done}" ]; then
get_version_warning_done=1
qeerror "Could not find a Makefile in the kernel source directory."
qeerror "Please ensure that ${KERNEL_DIR} points to a complete set of Linux sources"
fi
return 1
fi
# OK so now we know our sources directory, but they might be using
# KBUILD_OUTPUT, and we need this for .config and localversions-*
# so we better find it eh?
# do we pass KBUILD_OUTPUT on the CLI?
local OUTPUT_DIR=${KBUILD_OUTPUT}
# keep track of it
KERNEL_MAKEFILE="${KV_DIR}/Makefile"
if [[ -z ${OUTPUT_DIR} ]]; then
# Decide the function used to extract makefile variables.
local mkfunc=$(get_makefile_extract_function "${KERNEL_MAKEFILE}")
# And if we didn't pass it, we can take a nosey in the Makefile.
OUTPUT_DIR=$(${mkfunc} KBUILD_OUTPUT "${KERNEL_MAKEFILE}")
fi
# And contrary to existing functions I feel we shouldn't trust the
# directory name to find version information as this seems insane.
# So we parse ${KERNEL_MAKEFILE}. We should be able to trust that
# the Makefile is simple enough to use the noexec extract function.
# This has been true for every release thus far, and it's faster
# than using make to evaluate the Makefile every time.
KV_MAJOR=$(getfilevar_noexec VERSION "${KERNEL_MAKEFILE}")
KV_MINOR=$(getfilevar_noexec PATCHLEVEL "${KERNEL_MAKEFILE}")
KV_PATCH=$(getfilevar_noexec SUBLEVEL "${KERNEL_MAKEFILE}")
KV_EXTRA=$(getfilevar_noexec EXTRAVERSION "${KERNEL_MAKEFILE}")
if [ -z "${KV_MAJOR}" -o -z "${KV_MINOR}" -o -z "${KV_PATCH}" ]
then
if [ -z "${get_version_warning_done}" ]; then
get_version_warning_done=1
qeerror "Could not detect kernel version."
qeerror "Please ensure that ${KERNEL_DIR} points to a complete set of Linux sources."
fi
return 1
fi
[ -d "${OUTPUT_DIR}" ] && KV_OUT_DIR="${OUTPUT_DIR}"
if [ -n "${KV_OUT_DIR}" ];
then
qeinfo "Found kernel object directory:"
qeinfo " ${KV_OUT_DIR}"
fi
# and if we STILL have not got it, then we better just set it to KV_DIR
KV_OUT_DIR="${KV_OUT_DIR:-${KV_DIR}}"
# Grab the kernel release from the output directory.
# TODO: we MUST detect kernel.release being out of date, and 'return 1' from
# this function.
if [ -s "${KV_OUT_DIR}"/include/config/kernel.release ]; then
KV_LOCAL=$(<"${KV_OUT_DIR}"/include/config/kernel.release)
elif [ -s "${KV_OUT_DIR}"/.kernelrelease ]; then
KV_LOCAL=$(<"${KV_OUT_DIR}"/.kernelrelease)
else
KV_LOCAL=
fi
# KV_LOCAL currently contains the full release; discard the first bits.
tmplocal=${KV_LOCAL#${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}}
# If the updated local version was not changed, the tree is not prepared.
# Clear out KV_LOCAL in that case.
# TODO: this does not detect a change in the localversion part between
# kernel.release and the value that would be generated.
if [ "$KV_LOCAL" = "$tmplocal" ]; then
KV_LOCAL=
else
KV_LOCAL=$tmplocal
fi
# and in newer versions we can also pull LOCALVERSION if it is set.
# but before we do this, we need to find if we use a different object directory.
# This *WILL* break if the user is using localversions, but we assume it was
# caught before this if they are.
if [[ -z ${OUTPUT_DIR} ]] ; then
# Try to locate a kernel that is most relevant for us.
for OUTPUT_DIR in "${SYSROOT}" "${ROOT%/}" "" ; do
OUTPUT_DIR+="/lib/modules/${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}${KV_LOCAL}/build"
if [[ -e ${OUTPUT_DIR} ]] ; then
break
fi
done
fi
# And we should set KV_FULL to the full expanded version
KV_FULL="${KV_MAJOR}.${KV_MINOR}.${KV_PATCH}${KV_EXTRA}${KV_LOCAL}"
qeinfo "Found sources for kernel version:"
qeinfo " ${KV_FULL}"
return 0
}
# @FUNCTION: get_running_version
# @DESCRIPTION:
# It gets the version of the current running kernel and the result is the same as get_version() if the
# function can find the sources.
get_running_version() {
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
KV_FULL=$(uname -r)
if [[ -f ${ROOT%/}/lib/modules/${KV_FULL}/source/Makefile && -f ${ROOT%/}/lib/modules/${KV_FULL}/build/Makefile ]]; then
KERNEL_DIR=$(readlink -f ${ROOT%/}/lib/modules/${KV_FULL}/source)
KBUILD_OUTPUT=$(readlink -f ${ROOT%/}/lib/modules/${KV_FULL}/build)
unset KV_FULL
get_version
return $?
elif [[ -f ${ROOT%/}/lib/modules/${KV_FULL}/source/Makefile ]]; then
KERNEL_DIR=$(readlink -f ${ROOT%/}/lib/modules/${KV_FULL}/source)
unset KV_FULL
get_version
return $?
elif [[ -f ${ROOT%/}/lib/modules/${KV_FULL}/build/Makefile ]]; then
KERNEL_DIR=$(readlink -f ${ROOT%/}/lib/modules/${KV_FULL}/build)
unset KV_FULL
get_version
return $?
else
# This handles a variety of weird kernel versions. Make sure to update
# tests/linux-info_get_running_version.sh if you want to change this.
local kv_full=${KV_FULL//[-+_]*}
KV_MAJOR=$(ver_cut 1 ${kv_full})
KV_MINOR=$(ver_cut 2 ${kv_full})
KV_PATCH=$(ver_cut 3 ${kv_full})
KV_EXTRA="${KV_FULL#${KV_MAJOR}.${KV_MINOR}${KV_PATCH:+.${KV_PATCH}}}"
: ${KV_PATCH:=0}
fi
return 0
}
# This next function is named with the eclass prefix to avoid conflicts with
# some old versionator-like eclass functions.
# @FUNCTION: linux-info_get_any_version
# @DESCRIPTION:
# This attempts to find the version of the sources, and otherwise falls back to
# the version of the running kernel.
linux-info_get_any_version() {
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
if ! get_version; then
ewarn "Unable to calculate Linux Kernel version for build, attempting to use running version"
if ! get_running_version; then
die "Unable to determine any Linux Kernel version, please report a bug"
fi
fi
}
# ebuild check functions
# ---------------------------------------
# @FUNCTION: check_kernel_built
# @DESCRIPTION:
# This function verifies that the current kernel sources have been already prepared otherwise it dies.
check_kernel_built() {
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
# if we haven't determined the version yet, we need to
require_configured_kernel
local versionh_path
if kernel_is -ge 3 7; then
versionh_path="include/generated/uapi/linux/version.h"
else
versionh_path="include/linux/version.h"
fi
if [ ! -f "${KV_OUT_DIR}/${versionh_path}" ]
then
eerror "These sources have not yet been prepared."
eerror "We cannot build against an unprepared tree."
eerror "To resolve this, please type the following:"
eerror
eerror "# cd ${KV_DIR}"
eerror "# make oldconfig"
eerror "# make modules_prepare"
eerror
eerror "Then please try merging this module again."
die "Kernel sources need compiling first"
fi
}
# @FUNCTION: check_modules_supported
# @DESCRIPTION:
# This function verifies that the current kernel support modules (it checks CONFIG_MODULES=y) otherwise it dies.
check_modules_supported() {
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
# if we haven't determined the version yet, we need to.
require_configured_kernel
if ! linux_chkconfig_builtin "MODULES"; then
eerror "These sources do not support loading external modules."
eerror "to be able to use this module please enable \"Loadable modules support\""
eerror "in your kernel, recompile and then try merging this module again."
die "No support for external modules in ${KV_FULL} config"
fi
}
# @FUNCTION: check_extra_config
# @DESCRIPTION:
# It checks the kernel config options specified by CONFIG_CHECK. It dies only when a required config option (i.e.
# the prefix ~ is not used) doesn't satisfy the directive. Ignored on non-Linux systems.
check_extra_config() {
use kernel_linux || return
local config negate die error reworkmodulenames
local soft_errors_count=0 hard_errors_count=0 config_required=0
# store the value of the QA check, because otherwise we won't catch usages
# after if check_extra_config is called AND other direct calls are done
# later.
local old_LINUX_CONFIG_EXISTS_DONE="${_LINUX_CONFIG_EXISTS_DONE}"
# if we haven't determined the version yet, we need to
linux-info_get_any_version
# Determine if we really need a .config. The only time when we don't need
# one is when all of the CONFIG_CHECK options are prefixed with "~".
for config in ${CONFIG_CHECK}; do
if [[ "${config:0:1}" != "~" ]]; then
config_required=1
break
fi
done
if [[ ${config_required} == 0 ]]; then
# In the case where we don't require a .config, we can now bail out
# if the user has no .config as there is nothing to do. Otherwise
# code later will cause a failure due to missing .config.
if ! linux_config_exists; then
ewarn "Unable to check for the following kernel config options due"
ewarn "to absence of any configured kernel sources or compiled"
ewarn "config:"
for config in ${CONFIG_CHECK}; do
config=${config#\~}
config=${config#\!}
local_error="ERROR_${config}"
msg="${!local_error}"
if [[ -z ${msg} ]]; then
local_error="WARNING_${config}"
msg="${!local_error}"
fi
ewarn " - ${config}${msg:+ - }${msg}"
done
ewarn "You're on your own to make sure they are set if needed."
export _LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
return 0
fi
else
require_configured_kernel
fi
einfo "Checking for suitable kernel configuration options..."
for config in ${CONFIG_CHECK}
do
# if we specify any fatal, ensure we honor them
die=1
error=0
negate=0
reworkmodulenames=0
if [[ ${config:0:1} == "~" ]]; then
die=0
config=${config:1}
elif [[ ${config:0:1} == "@" ]]; then
die=0
reworkmodulenames=1
config=${config:1}
fi
if [[ ${config:0:1} == "!" ]]; then
negate=1
config=${config:1}
fi
if [[ ${negate} == 1 ]]; then
linux_chkconfig_present ${config} && error=2
elif [[ ${reworkmodulenames} == 1 ]]; then
local temp_config="${config//*:}" i n
config="${config//:*}"
if linux_chkconfig_present ${config}; then
for i in ${MODULE_NAMES}; do
n="${i//${temp_config}}"
[[ -z ${n//\(*} ]] && \
MODULE_IGNORE="${MODULE_IGNORE} ${temp_config}"
done
error=2
fi
else
linux_chkconfig_present ${config} || error=1
fi
if [[ ${error} -gt 0 ]]; then
local report_func="eerror" local_error
local_error="ERROR_${config}"
local_error="${!local_error}"
if [[ -z "${local_error}" ]]; then
# using old, deprecated format.
local_error="${config}_ERROR"
local_error="${!local_error}"
fi
if [[ ${die} == 0 && -z "${local_error}" ]]; then
#soft errors can be warnings
local_error="WARNING_${config}"
local_error="${!local_error}"
if [[ -n "${local_error}" ]] ; then
report_func="ewarn"
fi
fi
if [[ -z "${local_error}" ]]; then
[[ ${error} == 1 ]] \
&& local_error="is not set when it should be." \
|| local_error="should not be set. But it is."
local_error="CONFIG_${config}:\t ${local_error}"
fi
if [[ ${die} == 0 ]]; then
${report_func} " ${local_error}"
soft_errors_count=$[soft_errors_count + 1]
else
${report_func} " ${local_error}"
hard_errors_count=$[hard_errors_count + 1]
fi
fi
done
if [[ ${hard_errors_count} -gt 0 ]]; then
eerror "Please check to make sure these options are set correctly."
eerror "Failure to do so may cause unexpected problems."
eerror "Once you have satisfied these options, please try merging"
eerror "this package again."
export _LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
die "Incorrect kernel configuration options"
elif [[ ${soft_errors_count} -gt 0 ]]; then
ewarn "Please check to make sure these options are set correctly."
ewarn "Failure to do so may cause unexpected problems."
else
eend 0
fi
export _LINUX_CONFIG_EXISTS_DONE="${old_LINUX_CONFIG_EXISTS_DONE}"
}
check_zlibinflate() {
if ! use kernel_linux; then
die "${FUNCNAME}() called on non-Linux system, please fix the ebuild"
fi
# if we haven't determined the version yet, we need to
require_configured_kernel
# although I restructured this code - I really really really dont support it!
# bug #27882 - zlib routines are only linked into the kernel
# if something compiled into the kernel calls them
#
# plus, for the cloop module, it appears that there's no way
# to get cloop.o to include a static zlib if CONFIG_MODVERSIONS
# is on
local INFLATE
local DEFLATE
einfo "Determining the usability of ZLIB_INFLATE support in your kernel"
ebegin "checking ZLIB_INFLATE"
linux_chkconfig_builtin ZLIB_INFLATE
eend $? || die
ebegin "checking ZLIB_DEFLATE"
linux_chkconfig_builtin ZLIB_DEFLATE
eend $? || die
local LINENO_START
local LINENO_END
local SYMBOLS
local x
LINENO_END="$(grep -n 'CONFIG_ZLIB_INFLATE y' ${KV_DIR}/lib/Config.in | cut -d : -f 1)"
LINENO_START="$(head -n $LINENO_END ${KV_DIR}/lib/Config.in | grep -n 'if \[' | tail -n 1 | cut -d : -f 1)"
(( LINENO_AMOUNT = $LINENO_END - $LINENO_START ))
(( LINENO_END = $LINENO_END - 1 ))
SYMBOLS="$(head -n $LINENO_END ${KV_DIR}/lib/Config.in | tail -n $LINENO_AMOUNT | sed -e 's/^.*\(CONFIG_[^\" ]*\).*/\1/g;')"
# okay, now we have a list of symbols
# we need to check each one in turn, to see whether it is set or not
for x in $SYMBOLS ; do
if [ "${!x}" = "y" ]; then
# we have a winner!
einfo "${x} ensures zlib is linked into your kernel - excellent"
return 0
fi
done
eerror
eerror "This kernel module requires ZLIB library support."
eerror "You have enabled zlib support in your kernel, but haven't enabled"
eerror "enabled any option that will ensure that zlib is linked into your"
eerror "kernel."
eerror
eerror "Please ensure that you enable at least one of these options:"
eerror
for x in $SYMBOLS ; do
eerror " * $x"
done
eerror
eerror "Please remember to recompile and install your kernel, and reboot"
eerror "into your new kernel before attempting to load this kernel module."
die "Kernel doesn't include zlib support"
}
################################
# Default pkg_setup
# Also used when inheriting linux-mod to force a get_version call
# @FUNCTION: linux-info_pkg_setup
# @DESCRIPTION:
# Force a get_version() call when inherited from linux-mod.eclass and then check if the kernel is configured
# to support the options specified in CONFIG_CHECK (if not null)
linux-info_pkg_setup() {
use kernel_linux || return
linux-info_get_any_version
if kernel_is 2 4; then
if [ "$( gcc-major-version )" -eq "4" ] ; then
echo
ewarn "Be warned !! >=sys-devel/gcc-4.0.0 isn't supported with"
ewarn "linux-2.4 (or modules building against a linux-2.4 kernel)!"
echo
ewarn "Either switch to another gcc-version (via gcc-config) or use a"
ewarn "newer kernel that supports gcc-4."
echo
ewarn "Also be aware that bugreports about gcc-4 not working"
ewarn "with linux-2.4 based ebuilds will be closed as INVALID!"
echo
fi
fi
[ -n "${CONFIG_CHECK}" ] && check_extra_config;
}

769
eclass/linux-mod.eclass Normal file
View File

@ -0,0 +1,769 @@
# Copyright 1999-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: linux-mod.eclass
# @MAINTAINER:
# kernel@gentoo.org
# @AUTHOR:
# John Mylchreest <johnm@gentoo.org>,
# Stefan Schweizer <genstef@gentoo.org>
# @BLURB: It provides the functionality required to install external modules against a kernel source tree.
# @DESCRIPTION:
# This eclass is used to interface with linux-info.eclass in such a way
# to provide the functionality and initial functions
# required to install external modules against a kernel source
# tree.
# A couple of env vars are available to affect the usage of this eclass.
# These are as follows:
# @ECLASS-VARIABLE: MODULES_OPTIONAL_USE
# @DESCRIPTION:
# A string containing the USE flag to use for making this eclass optional
# The recommended non-empty value is 'modules'
# @ECLASS-VARIABLE: MODULES_OPTIONAL_USE_IUSE_DEFAULT
# @DESCRIPTION:
# A boolean to control the IUSE default state for the MODULES_OPTIONAL_USE USE
# flag. Default value is unset (false). True represented by 1 or 'on', other
# values including unset treated as false.
# @ECLASS-VARIABLE: KERNEL_DIR
# @DESCRIPTION:
# A string containing the directory of the target kernel sources. The default value is
# "/usr/src/linux"
# @ECLASS-VARIABLE: ECONF_PARAMS
# @DESCRIPTION:
# It's a string containing the parameters to pass to econf.
# If this is not set, then econf isn't run.
# @ECLASS-VARIABLE: BUILD_PARAMS
# @DESCRIPTION:
# It's a string with the parameters to pass to emake.
# @ECLASS-VARIABLE: BUILD_TARGETS
# @DESCRIPTION:
# It's a string with the build targets to pass to make. The default value is "clean module"
# @ECLASS-VARIABLE: MODULE_NAMES
# @DESCRIPTION:
# It's a string containing the modules to be built automatically using the default
# src_compile/src_install. It will only make ${BUILD_TARGETS} once in any directory.
#
# The structure of each MODULE_NAMES entry is as follows:
#
# modulename(libdir:srcdir:objdir)
#
# where:
#
# modulename = name of the module file excluding the .ko
# libdir = place in system modules directory where module is installed (by default it's misc)
# srcdir = place for ebuild to cd to before running make (by default it's ${S})
# objdir = place the .ko and objects are located after make runs (by default it's set to srcdir)
#
# To get an idea of how these variables are used, here's a few lines
# of code from around line 540 in this eclass:
#
# einfo "Installing ${modulename} module"
# cd ${objdir} || die "${objdir} does not exist"
# insinto /lib/modules/${KV_FULL}/${libdir}
# doins ${modulename}.${KV_OBJ} || die "doins ${modulename}.${KV_OBJ} failed"
#
# For example:
# MODULE_NAMES="module_pci(pci:${S}/pci:${S}) module_usb(usb:${S}/usb:${S})"
#
# what this would do is
#
# cd "${S}"/pci
# make ${BUILD_PARAMS} ${BUILD_TARGETS}
# cd "${S}"
# insinto /lib/modules/${KV_FULL}/pci
# doins module_pci.${KV_OBJ}
#
# cd "${S}"/usb
# make ${BUILD_PARAMS} ${BUILD_TARGETS}
# cd "${S}"
# insinto /lib/modules/${KV_FULL}/usb
# doins module_usb.${KV_OBJ}
# There is also support for automated modprobe.d file generation.
# This can be explicitly enabled by setting any of the following variables.
# @ECLASS-VARIABLE: MODULESD_<modulename>_ENABLED
# @DESCRIPTION:
# This is used to disable the modprobe.d file generation; otherwise the file will
# always be generated (unless no MODULESD_<modulename>_* variable is provided). Set to "no" to disable
# the generation of the file and the installation of the documentation.
# @ECLASS-VARIABLE: MODULESD_<modulename>_EXAMPLES
# @DESCRIPTION:
# This is a bash array containing a list of examples which should
# be used. If you want us to try and take a guess, set this to "guess".
#
# For each array_component, an options line is added to the modprobe.d file
#
# options array_component
#
# where array_component is "<modulename> options" (see modprobe.conf(5))
# @ECLASS-VARIABLE: MODULESD_<modulename>_ALIASES
# @DESCRIPTION:
# This is a bash array containing a list of associated aliases.
#
# For each array_component, an alias line is added to the modprobe.d file
#
# alias array_component
#
# where array_component is "wildcard <modulename>" (see modprobe.conf(5))
# @ECLASS-VARIABLE: MODULESD_<modulename>_ADDITIONS
# @DESCRIPTION:
# This is a bash array containing a list of additional things to
# add to the bottom of the file. This can be absolutely anything.
# Each entry is a new line.
# @ECLASS-VARIABLE: MODULESD_<modulename>_DOCS
# @DESCRIPTION:
# This is a string list which contains the full path to any associated
# documents for <modulename>. These files are installed in the live tree.
# @ECLASS-VARIABLE: KV_OBJ
# @DESCRIPTION:
# It's a read-only variable. It contains the extension of the kernel modules.
inherit eutils linux-info multilib toolchain-funcs
EXPORT_FUNCTIONS pkg_setup pkg_preinst pkg_postinst src_install src_compile pkg_postrm
case ${MODULES_OPTIONAL_USE_IUSE_DEFAULT:-n} in
[nNfF]*|[oO][fF]*|0|-) _modules_optional_use_iuse_default='' ;;
*) _modules_optional_use_iuse_default='+' ;;
esac
[[ -n "${_modules_optional_use_iuse_default}" ]] && case ${EAPI:-0} in
0) die "EAPI=${EAPI} is not supported with MODULES_OPTIONAL_USE_IUSE_DEFAULT due to lack of IUSE defaults" ;;
esac
IUSE="kernel_linux dist-kernel
${MODULES_OPTIONAL_USE:+${_modules_optional_use_iuse_default}}${MODULES_OPTIONAL_USE}"
SLOT="0"
RDEPEND="
${MODULES_OPTIONAL_USE}${MODULES_OPTIONAL_USE:+? (}
kernel_linux? (
sys-apps/kmod[tools]
dist-kernel? ( virtual/dist-kernel:= )
)
${MODULES_OPTIONAL_USE:+)}"
DEPEND="${RDEPEND}
${MODULES_OPTIONAL_USE}${MODULES_OPTIONAL_USE:+? (}
sys-apps/sed
kernel_linux? ( virtual/linux-sources virtual/libelf )
${MODULES_OPTIONAL_USE:+)}"
# eclass utilities
# ----------------------------------
check_vermagic() {
debug-print-function ${FUNCNAME} $*
local curr_gcc_ver=$(gcc -dumpversion)
local tmpfile old_chost old_gcc_ver result=0
[ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
tmpfile=`find "${KV_DIR}/" -iname "*.o.cmd" -exec grep usr/lib/gcc {} \; -quit`
tmpfile=${tmpfile//*usr/lib}
tmpfile=${tmpfile//\/include*}
old_chost=${tmpfile//*gcc\/}
old_chost=${old_chost//\/*}
old_gcc_ver=${tmpfile//*\/}
if [[ -z ${old_gcc_ver} || -z ${old_chost} ]]; then
ewarn ""
ewarn "Unable to detect what version of GCC was used to compile"
ewarn "the kernel. Build will continue, but you may experience problems."
elif [[ ${curr_gcc_ver} != ${old_gcc_ver} ]]; then
ewarn ""
ewarn "The version of GCC you are using (${curr_gcc_ver}) does"
ewarn "not match the version of GCC used to compile the"
ewarn "kernel (${old_gcc_ver})."
result=1
elif [[ ${CHOST} != ${old_chost} ]]; then
ewarn ""
ewarn "The current CHOST (${CHOST}) does not match the chost"
ewarn "used when compiling the kernel (${old_chost})."
result=1
fi
if [[ ${result} -gt 0 ]]; then
ewarn ""
ewarn "Build will not continue, because you will experience problems."
ewarn "To fix this either change the version of GCC you wish to use"
ewarn "to match the kernel, or recompile the kernel first."
die "GCC Version Mismatch."
fi
}
# @FUNCTION: use_m
# @RETURN: true or false
# @DESCRIPTION:
# It checks if the kernel version is greater than 2.6.5.
use_m() {
debug-print-function ${FUNCNAME} $*
# if we haven't determined the version yet, we need to.
get_version;
# if the kernel version is greater than 2.6.6 then we should use
# M= instead of SUBDIRS=
[ ${KV_MAJOR} -ge 3 ] && return 0
[ ${KV_MAJOR} -eq 2 -a ${KV_MINOR} -gt 5 -a ${KV_PATCH} -gt 5 ] && \
return 0 || return 1
}
# @FUNCTION: convert_to_m
# @USAGE: </path/to/the/file>
# @DESCRIPTION:
# It converts a file (e.g. a makefile) to use M= instead of SUBDIRS=
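# Example sketch (the Makefile path is hypothetical):
# @CODE
# convert_to_m "${S}"/Makefile
# @CODE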
convert_to_m() {
debug-print-function ${FUNCNAME} $*
if use_m
then
[ ! -f "${1}" ] && \
die "convert_to_m() requires a filename as an argument"
ebegin "Converting ${1/${WORKDIR}\//} to use M= instead of SUBDIRS="
sed -i 's:SUBDIRS=:M=:g' "${1}"
eend $?
fi
}
# internal function
#
# FUNCTION: update_depmod
# DESCRIPTION:
# It updates the modules.dep file for the current kernel.
update_depmod() {
debug-print-function ${FUNCNAME} $*
# if we haven't determined the version yet, we need to.
get_version;
ebegin "Updating module dependencies for ${KV_FULL}"
if [ -r "${KV_OUT_DIR}"/System.map ]
then
depmod -ae -F "${KV_OUT_DIR}"/System.map -b "${ROOT:-/}" ${KV_FULL}
eend $?
else
ewarn
ewarn "${KV_OUT_DIR}/System.map not found."
ewarn "You must manually update the kernel module dependencies using depmod."
eend 1
ewarn
fi
}
# internal function
#
# FUNCTION: move_old_moduledb
# DESCRIPTION:
# It updates the location of the database used by the module-rebuild utility.
move_old_moduledb() {
debug-print-function ${FUNCNAME} $*
local OLDDIR="${ROOT%/}"/usr/share/module-rebuild
local NEWDIR="${ROOT%/}"/var/lib/module-rebuild
if [[ -f "${OLDDIR}"/moduledb ]]; then
[[ ! -d "${NEWDIR}" ]] && mkdir -p "${NEWDIR}"
[[ ! -f "${NEWDIR}"/moduledb ]] && \
mv "${OLDDIR}"/moduledb "${NEWDIR}"/moduledb
rm -f "${OLDDIR}"/*
rmdir "${OLDDIR}"
fi
}
# internal function
#
# FUNCTION: update_moduledb
# DESCRIPTION:
# It adds the package to the /var/lib/module-rebuild/moduledb database used by the module-rebuild utility.
update_moduledb() {
debug-print-function ${FUNCNAME} $*
local MODULEDB_DIR="${ROOT%/}"/var/lib/module-rebuild
move_old_moduledb
if [[ ! -f "${MODULEDB_DIR}"/moduledb ]]; then
[[ ! -d "${MODULEDB_DIR}" ]] && mkdir -p "${MODULEDB_DIR}"
touch "${MODULEDB_DIR}"/moduledb
fi
if ! grep -qs ${CATEGORY}/${PN}-${PVR} "${MODULEDB_DIR}"/moduledb ; then
einfo "Adding module to moduledb."
echo "a:1:${CATEGORY}/${PN}-${PVR}" >> "${MODULEDB_DIR}"/moduledb
fi
}
# internal function
#
# FUNCTION: remove_moduledb
# DESCRIPTION:
# It removes the package from the /var/lib/module-rebuild/moduledb database used by
# the module-rebuild utility.
remove_moduledb() {
debug-print-function ${FUNCNAME} $*
local MODULEDB_DIR="${ROOT%/}"/var/lib/module-rebuild
move_old_moduledb
if grep -qs ${CATEGORY}/${PN}-${PVR} "${MODULEDB_DIR}"/moduledb ; then
einfo "Removing ${CATEGORY}/${PN}-${PVR} from moduledb."
sed -i -e "/.*${CATEGORY}\/${PN}-${PVR}.*/d" "${MODULEDB_DIR}"/moduledb
fi
}
# @FUNCTION: set_kvobj
# @DESCRIPTION:
# It sets the KV_OBJ variable.
set_kvobj() {
debug-print-function ${FUNCNAME} $*
if kernel_is ge 2 6
then
KV_OBJ="ko"
else
KV_OBJ="o"
fi
# Do we really need to know this?
# Lets silence it.
# einfo "Using KV_OBJ=${KV_OBJ}"
}
get-KERNEL_CC() {
debug-print-function ${FUNCNAME} $*
if [[ -n ${KERNEL_CC} ]] ; then
echo "${KERNEL_CC}"
return
fi
local kernel_cc
if [ -n "${KERNEL_ABI}" ]; then
# In future, an arch might want to define CC_$ABI
#kernel_cc="$(get_abi_CC)"
#[ -z "${kernel_cc}" ] &&
kernel_cc="$(tc-getCC $(ABI=${KERNEL_ABI} get_abi_CHOST))"
else
kernel_cc=$(tc-getCC)
fi
echo "${kernel_cc}"
}
# internal function
#
# FUNCTION:
# USAGE: /path/to/the/modulename_without_extension
# RETURN: A file in /etc/modprobe.d
# DESCRIPTION:
# This function will generate and install the necessary modprobe.d file from the
# information contained in the modules exported parms.
# (see the variables MODULESD_<modulename>_ENABLED, MODULESD_<modulename>_EXAMPLES,
# MODULESD_<modulename>_ALIASES, MODULESD_<modulename>_ADDITIONS and MODULESD_<modulename>_DOCS).
#
# At the end the documentation specified with MODULESD_<modulename>_DOCS is installed.
generate_modulesd() {
debug-print-function ${FUNCNAME} $*
[ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
local currm_path currm currm_t t myIFS myVAR
local module_docs module_enabled module_aliases \
module_additions module_examples module_modinfo module_opts
for currm_path in ${@}
do
currm=${currm_path//*\/}
currm=$(echo ${currm} | tr '[:lower:]' '[:upper:]')
currm_t=${currm}
while [[ -z ${currm_t//*-*} ]]; do
currm_t=${currm_t/-/_}
done
module_docs="$(eval echo \${MODULESD_${currm_t}_DOCS})"
module_enabled="$(eval echo \${MODULESD_${currm_t}_ENABLED})"
module_aliases="$(eval echo \${#MODULESD_${currm_t}_ALIASES[*]})"
module_additions="$(eval echo \${#MODULESD_${currm_t}_ADDITIONS[*]})"
module_examples="$(eval echo \${#MODULESD_${currm_t}_EXAMPLES[*]})"
[[ ${module_aliases} -eq 0 ]] && unset module_aliases
[[ ${module_additions} -eq 0 ]] && unset module_additions
[[ ${module_examples} -eq 0 ]] && unset module_examples
# If we specify we don't want it, then let's exit; otherwise we assume
# that if it's set, we do want it.
[[ ${module_enabled} == no ]] && return 0
# unset any unwanted variables.
for t in ${!module_*}
do
[[ -z ${!t} ]] && unset ${t}
done
[[ -z ${!module_*} ]] && return 0
# OK so now if we have got this far, then we know we want to continue
# and generate the modprobe.d file.
module_modinfo="$(modinfo -p ${currm_path}.${KV_OBJ})"
module_config="${T}/modulesd-${currm}"
ebegin "Preparing file for modprobe.d"
#-----------------------------------------------------------------------
echo "# modprobe.d configuration file for ${currm}" >> "${module_config}"
#-----------------------------------------------------------------------
[[ -n ${module_docs} ]] && \
echo "# For more information please read:" >> "${module_config}"
for t in ${module_docs}
do
echo "# ${t//*\/}" >> "${module_config}"
done
echo >> "${module_config}"
#-----------------------------------------------------------------------
if [[ ${module_aliases} -gt 0 ]]
then
echo "# Internal Aliases - Do not edit" >> "${module_config}"
echo "# ------------------------------" >> "${module_config}"
for((t=0; t<${module_aliases}; t++))
do
echo "alias $(eval echo \${MODULESD_${currm}_ALIASES[$t]})" \
>> "${module_config}"
done
echo '' >> "${module_config}"
fi
#-----------------------------------------------------------------------
if [[ -n ${module_modinfo} ]]
then
echo >> "${module_config}"
echo "# Configurable module parameters" >> "${module_config}"
echo "# ------------------------------" >> "${module_config}"
myIFS="${IFS}"
IFS="$(echo -en "\n\b")"
for t in ${module_modinfo}
do
myVAR="$(echo ${t#*:} | grep -o "[^ ]*[0-9][ =][^ ]*" | tail -1 | grep -o "[0-9]")"
if [[ -n ${myVAR} ]]
then
module_opts="${module_opts} ${t%%:*}:${myVAR}"
fi
echo -e "# ${t%%:*}:\t${t#*:}" >> "${module_config}"
done
IFS="${myIFS}"
echo '' >> "${module_config}"
fi
#-----------------------------------------------------------------------
if [[ $(eval echo \${MODULESD_${currm}_ALIASES[0]}) == guess ]]
then
# So let's do some guesswork, eh?
if [[ -n ${module_opts} ]]
then
echo "# For Example..." >> "${module_config}"
echo "# --------------" >> "${module_config}"
for t in ${module_opts}
do
echo "# options ${currm} ${t//:*}=${t//*:}" >> "${module_config}"
done
echo '' >> "${module_config}"
fi
elif [[ ${module_examples} -gt 0 ]]
then
echo "# For Example..." >> "${module_config}"
echo "# --------------" >> "${module_config}"
for((t=0; t<${module_examples}; t++))
do
echo "options $(eval echo \${MODULESD_${currm}_EXAMPLES[$t]})" \
>> "${module_config}"
done
echo '' >> "${module_config}"
fi
#-----------------------------------------------------------------------
if [[ ${module_additions} -gt 0 ]]
then
for((t=0; t<${module_additions}; t++))
do
echo "$(eval echo \${MODULESD_${currm}_ADDITIONS[$t]})" \
>> "${module_config}"
done
echo '' >> "${module_config}"
fi
#-----------------------------------------------------------------------
# then we install it
insinto /etc/modprobe.d
newins "${module_config}" "${currm_path//*\/}.conf"
# and install any documentation we might have.
[[ -n ${module_docs} ]] && dodoc ${module_docs}
done
eend 0
return 0
}
# internal function
#
# FUNCTION: find_module_params
# USAGE: A string "NAME(LIBDIR:SRCDIR:OBJDIR)"
# RETURN: The string "modulename:NAME libdir:LIBDIR srcdir:SRCDIR objdir:OBJDIR"
# DESCRIPTION:
# Analyze the specification NAME(LIBDIR:SRCDIR:OBJDIR) of one module as described in MODULE_NAMES.
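#
# Example (illustrative): find_module_params "pcnet32(drivers/net)"
# prints "modulename:pcnet32 libdir:drivers/net".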
find_module_params() {
debug-print-function ${FUNCNAME} $*
local matched_offset=0 matched_opts=0 test="${@}" tempvar result
local i=0 y=0 z=0
for((i=0; i<=${#test}; i++))
do
case ${test:${i}:1} in
\() matched_offset[0]=${i};;
\:) matched_opts=$((${matched_opts} + 1));
matched_offset[${matched_opts}]="${i}";;
\)) matched_opts=$((${matched_opts} + 1));
matched_offset[${matched_opts}]="${i}";;
esac
done
for((i=0; i<=${matched_opts}; i++))
do
# i = offset we're working on
# y = last offset
# z = current offset - last offset
# tempvar = temporary holder for the parsed value
case ${i} in
0) tempvar=${test:0:${matched_offset[0]}};;
*) y=$((${matched_offset[$((${i} - 1))]} + 1))
z=$((${matched_offset[${i}]} - ${matched_offset[$((${i} - 1))]}));
z=$((${z} - 1))
tempvar=${test:${y}:${z}};;
esac
case ${i} in
0) result="${result} modulename:${tempvar}";;
1) result="${result} libdir:${tempvar}";;
2) result="${result} srcdir:${tempvar}";;
3) result="${result} objdir:${tempvar}";;
esac
done
echo ${result}
}
# default ebuild functions
# --------------------------------
# @FUNCTION: linux-mod_pkg_setup
# @DESCRIPTION:
# It checks the CONFIG_CHECK options (see linux-info.eclass(5)), verifies that the kernel is
# configured, verifies that the sources are prepared, verifies that module support is enabled
# in the kernel, and sets the object extension KV_OBJ.
linux-mod_pkg_setup() {
debug-print-function ${FUNCNAME} $*
[ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
local is_bin="${MERGE_TYPE}"
# If we are installing a binpkg, take a different path.
# use MERGE_TYPE if available (eapi>=4); else use non-PMS EMERGE_FROM (eapi<4)
if has ${EAPI} 0 1 2 3; then
is_bin=${EMERGE_FROM}
fi
if [[ ${is_bin} == binary ]]; then
linux-mod_pkg_setup_binary
return
fi
# External modules use kernel symbols (bug #591832)
CONFIG_CHECK+=" !TRIM_UNUSED_KSYMS"
linux-info_pkg_setup;
require_configured_kernel
check_kernel_built;
strip_modulenames;
[[ -n ${MODULE_NAMES} ]] && check_modules_supported
set_kvobj;
# Commented out with permission from johnm until a fixed version for arches
# who intentionally use different kernel and userland compilers can be
# introduced - Jason Wever <weeve@gentoo.org>, 23 Oct 2005
#check_vermagic;
}
# @FUNCTION: linux-mod_pkg_setup_binary
# @DESCRIPTION:
# Perform all kernel option checks non-fatally, as the .config and
# /proc/config.gz might not be present. Do not do anything that requires kernel
# sources.
linux-mod_pkg_setup_binary() {
debug-print-function ${FUNCNAME} $*
local new_CONFIG_CHECK
# '~' always needs to be quoted, else bash expands it.
for config in $CONFIG_CHECK ; do
optional='~'
[[ ${config:0:1} == "~" ]] && optional=''
new_CONFIG_CHECK="${new_CONFIG_CHECK} ${optional}${config}"
done
CONFIG_CHECK="${new_CONFIG_CHECK}"
linux-info_pkg_setup;
}
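# internal function
#
# FUNCTION: strip_modulenames
# DESCRIPTION:
# It removes every module listed in MODULE_IGNORE from MODULE_NAMES.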
strip_modulenames() {
debug-print-function ${FUNCNAME} $*
local i
for i in ${MODULE_IGNORE}; do
MODULE_NAMES=${MODULE_NAMES//${i}(*}
done
}
# @FUNCTION: linux-mod_src_compile
# @DESCRIPTION:
# It compiles all the modules specified in MODULE_NAMES. For each module, econf is run only if
# ECONF_PARAMS is defined; the make targets are specified by BUILD_TARGETS and the options by
# BUILD_PARAMS (all the modules share these variables). The compilation happens inside ${srcdir}.
#
# Look at the description of these variables for more details.
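#
# Example (hypothetical ebuild snippet):
# @CODE
# MODULE_NAMES="foo(kernel/fs/foo:${S}/src)"
# BUILD_TARGETS="all"
# BUILD_PARAMS="KDIR=${KV_OUT_DIR}"   # KDIR is a hypothetical Makefile variable
# @CODE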
linux-mod_src_compile() {
debug-print-function ${FUNCNAME} $*
[ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
local modulename libdir srcdir objdir i n myABI="${ABI}"
set_arch_to_kernel
ABI="${KERNEL_ABI}"
[[ -n ${KERNEL_DIR} ]] && addpredict "${KERNEL_DIR}/null.dwo"
BUILD_TARGETS=${BUILD_TARGETS:-clean module}
strip_modulenames;
cd "${S}"
touch Module.symvers
for i in ${MODULE_NAMES}
do
unset libdir srcdir objdir
for n in $(find_module_params ${i})
do
eval ${n/:*}=${n/*:/}
done
libdir=${libdir:-misc}
srcdir=${srcdir:-${S}}
objdir=${objdir:-${srcdir}}
if [ ! -f "${srcdir}/.built" ];
then
cd "${srcdir}"
ln -s "${S}"/Module.symvers Module.symvers
einfo "Preparing ${modulename} module"
if [[ -n ${ECONF_PARAMS} ]]
then
econf ${ECONF_PARAMS} || \
die "Unable to run econf ${ECONF_PARAMS}"
fi
# This looks messy, but it is needed to handle multiple variables
# being passed in the BUILD_* stuff where the variables also have
# spaces that must be preserved. If we don't do this, then the stuff
# inside the variables gets used as targets for make, which then
# fails.
eval "emake HOSTCC=\"$(tc-getBUILD_CC)\" \
CROSS_COMPILE=${CHOST}- \
LDFLAGS=\"$(get_abi_LDFLAGS)\" \
${BUILD_FIXES} \
${BUILD_PARAMS} \
${BUILD_TARGETS} " \
|| die "Unable to emake HOSTCC="$(tc-getBUILD_CC)" CROSS_COMPILE=${CHOST}- LDFLAGS="$(get_abi_LDFLAGS)" ${BUILD_FIXES} ${BUILD_PARAMS} ${BUILD_TARGETS}"
cd "${OLDPWD}"
touch "${srcdir}"/.built
fi
done
set_arch_to_portage
ABI="${myABI}"
}
# @FUNCTION: linux-mod_src_install
# @DESCRIPTION:
# It installs the modules specified in MODULE_NAMES. The modules should be inside the ${objdir}
# directory and they are installed into /lib/modules/${KV_FULL}/${libdir}.
#
# The modprobe.d configuration file is automatically generated if the
# MODULESD_<modulename>_* variables are defined. The only way to stop this process is by
# setting MODULESD_<modulename>_ENABLED=no. At the end the documentation specified via
# MODULESD_<modulename>_DOCS is also installed.
#
# Look at the description of these variables for more details.
linux-mod_src_install() {
debug-print-function ${FUNCNAME} $*
[ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
local modulename libdir srcdir objdir i n
[[ -n ${KERNEL_DIR} ]] && addpredict "${KERNEL_DIR}/null.dwo"
strip_modulenames;
for i in ${MODULE_NAMES}
do
unset libdir srcdir objdir
for n in $(find_module_params ${i})
do
eval ${n/:*}=${n/*:/}
done
libdir=${libdir:-misc}
srcdir=${srcdir:-${S}}
objdir=${objdir:-${srcdir}}
einfo "Installing ${modulename} module"
cd "${objdir}" || die "${objdir} does not exist"
insinto /lib/modules/${KV_FULL}/${libdir}
doins ${modulename}.${KV_OBJ} || die "doins ${modulename}.${KV_OBJ} failed"
cd "${OLDPWD}"
generate_modulesd "${objdir}/${modulename}"
done
}
# @FUNCTION: linux-mod_pkg_preinst
# @DESCRIPTION:
# It decides whether the module dependencies and the moduledb need to be updated
# after merging, based on whether ${D}/lib/modules was populated.
linux-mod_pkg_preinst() {
debug-print-function ${FUNCNAME} $*
[ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
[ -d "${D%/}/lib/modules" ] && UPDATE_DEPMOD=true || UPDATE_DEPMOD=false
[ -d "${D%/}/lib/modules" ] && UPDATE_MODULEDB=true || UPDATE_MODULEDB=false
}
# @FUNCTION: linux-mod_pkg_postinst
# @DESCRIPTION:
# It executes /sbin/depmod and adds the package to the /var/lib/module-rebuild/moduledb
# database (if ${D}/lib/modules was created).
linux-mod_pkg_postinst() {
debug-print-function ${FUNCNAME} $*
[ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
${UPDATE_DEPMOD} && update_depmod;
${UPDATE_MODULEDB} && update_moduledb;
}
# @FUNCTION: linux-mod_pkg_postrm
# @DESCRIPTION:
# It removes the package from the /var/lib/module-rebuild/moduledb database but it doesn't
# call /sbin/depmod because the modules are still installed.
linux-mod_pkg_postrm() {
debug-print-function ${FUNCNAME} $*
[ -n "${MODULES_OPTIONAL_USE}" ] && use !${MODULES_OPTIONAL_USE} && return
remove_moduledb;
}

228
eclass/llvm.eclass Normal file
View File

@ -0,0 +1,228 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: llvm.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 6 7
# @BLURB: Utility functions to build against slotted LLVM
# @DESCRIPTION:
# The llvm.eclass provides utility functions that can be used to build
# against specific version of slotted LLVM (with fallback to :0 for old
# versions).
#
# This eclass does not generate dependency strings. You need to write
# a proper dependency string yourself to guarantee that appropriate
# version of LLVM is installed.
#
# Example use for a package supporting LLVM 5 to 7:
# @CODE
# inherit cmake-utils llvm
#
# RDEPEND="
# <sys-devel/llvm-8:=
# || (
# sys-devel/llvm:7
# sys-devel/llvm:6
# sys-devel/llvm:5
# )
# "
# DEPEND=${RDEPEND}
#
# LLVM_MAX_SLOT=7
#
# # only if you need to define one explicitly
# pkg_setup() {
# llvm_pkg_setup
# do-something-else
# }
# @CODE
#
# Example for a package needing LLVM+clang w/ a specific target:
# @CODE
# inherit cmake-utils llvm
#
# # note: do not use := on both clang and llvm, it can match different
# # slots then. clang pulls llvm in, so we can skip the latter.
# RDEPEND="
# >=sys-devel/clang-6:=[llvm_targets_AMDGPU(+)]
# "
# DEPEND=${RDEPEND}
#
# llvm_check_deps() {
# has_version -d "sys-devel/clang:${LLVM_SLOT}[llvm_targets_AMDGPU(+)]"
# }
# @CODE
case "${EAPI:-0}" in
0|1|2|3|4|5)
die "Unsupported EAPI=${EAPI:-0} (too old) for ${ECLASS}"
;;
6|7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
EXPORT_FUNCTIONS pkg_setup
if [[ ! ${_LLVM_ECLASS} ]]; then
# make sure that the versions installing straight into /usr/bin
# are uninstalled
DEPEND="!!sys-devel/llvm:0"
# @ECLASS-VARIABLE: LLVM_MAX_SLOT
# @DEFAULT_UNSET
# @DESCRIPTION:
# Highest LLVM slot supported by the package. Needs to be set before
# llvm_pkg_setup is called. If unset, no upper bound is assumed.
# @ECLASS-VARIABLE: _LLVM_KNOWN_SLOTS
# @INTERNAL
# @DESCRIPTION:
# Correct values of LLVM slots, newest first.
declare -g -r _LLVM_KNOWN_SLOTS=( {12..8} )
# @FUNCTION: get_llvm_prefix
# @USAGE: [-b|-d] [<max_slot>]
# @DESCRIPTION:
# Find the newest LLVM install that is acceptable for the package,
# and print an absolute path to it.
#
# If -b is specified, the checks are performed relative to BROOT,
# and BROOT-path is returned. This is appropriate when your package
# calls llvm-config executable. -b is supported since EAPI 7.
#
# If -d is specified, the checks are performed relative to ESYSROOT,
# and ESYSROOT-path is returned. This is appropriate when your package
# uses CMake find_package(LLVM). -d is the default.
#
# If <max_slot> is specified, then only LLVM versions that are not newer
# than <max_slot> will be considered. Otherwise, all LLVM versions would
# be considered acceptable. The function does not support specifying
# a minimum supported version -- the developer must ensure that a version
# new enough is installed via providing appropriate dependencies.
#
# If llvm_check_deps() function is defined within the ebuild, it will
# be called to verify whether a particular slot is acceptable. Within
# the function scope, LLVM_SLOT will be defined to the SLOT value
# (0, 4, 5...). The function should return a true status if the slot
# is acceptable, false otherwise. If llvm_check_deps() is not defined,
# the function defaults to checking whether sys-devel/llvm:${LLVM_SLOT}
# is installed.
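#
# Example (illustrative only):
# @CODE
# src_configure() {
# export LLVM_CONFIG="$(get_llvm_prefix -b "${LLVM_MAX_SLOT}")/bin/llvm-config"
# default
# }
# @CODE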
get_llvm_prefix() {
debug-print-function ${FUNCNAME} "${@}"
local hv_switch=-d
while [[ ${1} == -* ]]; do
case ${1} in
-b|-d) hv_switch=${1};;
*) break;;
esac
shift
done
local prefix=
if [[ ${EAPI} != 6 ]]; then
case ${hv_switch} in
-b)
prefix=${BROOT}
;;
-d)
prefix=${ESYSROOT}
;;
esac
else
case ${hv_switch} in
-b)
die "${FUNCNAME} -b is not supported in EAPI ${EAPI}"
;;
-d)
prefix=${EPREFIX}
hv_switch=
;;
esac
fi
local max_slot=${1}
local slot
for slot in "${_LLVM_KNOWN_SLOTS[@]}"; do
# skip higher slots
if [[ -n ${max_slot} ]]; then
if [[ ${max_slot} == ${slot} ]]; then
max_slot=
else
continue
fi
fi
if declare -f llvm_check_deps >/dev/null; then
local LLVM_SLOT=${slot}
llvm_check_deps || continue
else
# check if LLVM package is installed
has_version ${hv_switch} "sys-devel/llvm:${slot}" || continue
fi
echo "${prefix}/usr/lib/llvm/${slot}"
return
done
# max_slot should have been unset in the iteration
if [[ -n ${max_slot} ]]; then
die "${FUNCNAME}: invalid max_slot=${max_slot}"
fi
die "No LLVM slot${1:+ <= ${1}} found installed!"
}
# @FUNCTION: llvm_pkg_setup
# @DESCRIPTION:
# Prepend the appropriate executable directory for the newest
# acceptable LLVM slot to the PATH. For path determination logic,
# please see the get_llvm_prefix documentation.
#
# The highest acceptable LLVM slot can be set in LLVM_MAX_SLOT variable.
# If it is unset or empty, any slot is acceptable.
#
# The PATH manipulation is only done for source builds. The function
# is a no-op when installing a binary package.
#
# If any other behavior is desired, the contents of the function
# should be inlined into the ebuild and modified as necessary.
llvm_pkg_setup() {
debug-print-function ${FUNCNAME} "${@}"
if [[ ${MERGE_TYPE} != binary ]]; then
local llvm_path=$(get_llvm_prefix "${LLVM_MAX_SLOT}")/bin
local IFS=:
local split_path=( ${PATH} )
local new_path=()
local x added=
# prepend new path before first LLVM version found
for x in "${split_path[@]}"; do
if [[ ${x} == */usr/lib/llvm/*/bin ]]; then
if [[ ${x} != ${llvm_path} ]]; then
new_path+=( "${llvm_path}" )
elif [[ ${added} && ${x} == ${llvm_path} ]]; then
# deduplicate
continue
fi
added=1
fi
new_path+=( "${x}" )
done
# ...or to the end of PATH
[[ ${added} ]] || new_path+=( "${llvm_path}" )
export PATH=${new_path[*]}
fi
}
_LLVM_ECLASS=1
fi

253
eclass/llvm.org.eclass Normal file
View File

@ -0,0 +1,253 @@
# Copyright 2019-2021 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: llvm.org.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
# @AUTHOR:
# Michał Górny <mgorny@gentoo.org>
# @BLURB: Common bits for fetching & unpacking llvm.org projects
# @DESCRIPTION:
# The llvm.org eclass provides common code to fetch and unpack parts
# of the llvm.org project tree. It takes care of handling both git
# checkouts and source tarballs, making it possible to unify the code
# of live and release ebuilds and effectively reduce the work needed
# to package new releases/RCs/branches.
#
# In order to use this eclass, the ebuild needs to declare
# LLVM_COMPONENTS and then call llvm.org_set_globals. If tests require
# additional components, they need to be listed in LLVM_TEST_COMPONENTS.
# The eclass exports an implementation of src_unpack() phase.
#
# Example:
# @CODE
# inherit llvm.org
#
# LLVM_COMPONENTS=( lld )
# LLVM_TEST_COMPONENTS=( llvm/utils/lit )
# llvm.org_set_globals
# @CODE
case "${EAPI:-0}" in
7)
;;
*)
die "Unsupported EAPI=${EAPI} for ${ECLASS}"
;;
esac
# == internal control bits ==
# @ECLASS-VARIABLE: _LLVM_MASTER_MAJOR
# @INTERNAL
# @DESCRIPTION:
# The major version of current LLVM trunk. Used to determine
# the correct branch to use.
_LLVM_MASTER_MAJOR=13
# @ECLASS-VARIABLE: _LLVM_SOURCE_TYPE
# @INTERNAL
# @DESCRIPTION:
# Source type to use: 'git' or 'tar'.
if [[ -z ${_LLVM_SOURCE_TYPE+1} ]]; then
if [[ ${PV} == *.9999 ]]; then
_LLVM_SOURCE_TYPE=git
else
_LLVM_SOURCE_TYPE=tar
fi
fi
[[ ${_LLVM_SOURCE_TYPE} == git ]] && inherit git-r3
[[ ${PV} == ${_LLVM_MASTER_MAJOR}.* && ${_LLVM_SOURCE_TYPE} == tar ]] &&
die "${ECLASS}: Release ebuild for master branch?!"
inherit multiprocessing
# == control variables ==
# @ECLASS-VARIABLE: LLVM_COMPONENTS
# @REQUIRED
# @DESCRIPTION:
# List of components needed unconditionally. Specified as bash array
# with paths relative to llvm-project git. Automatically translated
# for tarball releases.
#
# The first path specified is used to construct default S.
# @ECLASS-VARIABLE: LLVM_TEST_COMPONENTS
# @DEFAULT_UNSET
# @DESCRIPTION:
# List of additional components needed for tests.
# @ECLASS-VARIABLE: LLVM_MANPAGES
# @DEFAULT_UNSET
# @DESCRIPTION:
# If set to 'build', include the dependency on dev-python/sphinx to build
# the manpages. If set to 'pregenerated', fetch and install
# pregenerated manpages from the archive.
# == global scope logic ==
# @FUNCTION: llvm.org_set_globals
# @DESCRIPTION:
# Set global variables. This must be called after setting LLVM_*
# variables used by the eclass.
llvm.org_set_globals() {
if [[ $(declare -p LLVM_COMPONENTS) != "declare -a"* ]]; then
die 'LLVM_COMPONENTS must be an array.'
fi
if declare -p LLVM_TEST_COMPONENTS &>/dev/null; then
if [[ $(declare -p LLVM_TEST_COMPONENTS) != "declare -a"* ]]; then
die 'LLVM_TEST_COMPONENTS must be an array.'
fi
fi
if [[ ${_LLVM_SOURCE_TYPE} == git ]]; then
EGIT_REPO_URI="https://github.com/llvm/llvm-project.git"
[[ ${PV} != ${_LLVM_MASTER_MAJOR}.* ]] &&
EGIT_BRANCH="release/${PV%%.*}.x"
elif [[ ${_LLVM_SOURCE_TYPE} == tar ]]; then
SRC_URI+="
https://github.com/llvm/llvm-project/archive/llvmorg-${PV/_/-}.tar.gz"
else
die "Invalid _LLVM_SOURCE_TYPE: ${LLVM_SOURCE_TYPE}"
fi
S=${WORKDIR}/${LLVM_COMPONENTS[0]}
if [[ -n ${LLVM_TEST_COMPONENTS+1} ]]; then
IUSE+=" test"
RESTRICT+=" !test? ( test )"
fi
case ${LLVM_MANPAGES:-__unset__} in
__unset__)
# no manpage support
;;
build)
IUSE+=" doc"
# NB: this is not always the correct dep but it does no harm
BDEPEND+=" dev-python/sphinx"
;;
pregenerated)
IUSE+=" doc"
SRC_URI+="
!doc? (
https://dev.gentoo.org/~mgorny/dist/llvm/llvm-${PV}-manpages.tar.bz2
)"
;;
*)
die "Invalid LLVM_MANPAGES=${LLVM_MANPAGES}"
esac
# === useful defaults for cmake-based packages ===
# least intrusive of all
CMAKE_BUILD_TYPE=RelWithDebInfo
_LLVM_ORG_SET_GLOBALS_CALLED=1
}
# == phase functions ==
EXPORT_FUNCTIONS src_unpack
if ver_test -ge 10.0.1_rc; then
EXPORT_FUNCTIONS src_prepare
fi
# @FUNCTION: llvm.org_src_unpack
# @DESCRIPTION:
# Unpack or checkout requested LLVM components.
llvm.org_src_unpack() {
if [[ ! ${_LLVM_ORG_SET_GLOBALS_CALLED} ]]; then
die "llvm.org_set_globals must be called in global scope"
fi
local components=( "${LLVM_COMPONENTS[@]}" )
if [[ ${LLVM_TEST_COMPONENTS+1} ]] && use test; then
components+=( "${LLVM_TEST_COMPONENTS[@]}" )
fi
if [[ ${_LLVM_SOURCE_TYPE} == git ]]; then
git-r3_fetch
git-r3_checkout '' . '' "${components[@]}"
default_src_unpack
else
local archive=llvmorg-${PV/_/-}.tar.gz
ebegin "Unpacking from ${archive}"
tar -x -z -o --strip-components 1 \
-f "${DISTDIR}/${archive}" \
"${components[@]/#/llvm-project-${archive%.tar*}/}" || die
eend ${?}
# unpack all remaining distfiles
local x
for x in ${A}; do
[[ ${x} != ${archive} ]] && unpack "${x}"
done
fi
}
# @FUNCTION: llvm.org_src_prepare
# @DESCRIPTION:
# Call appropriate src_prepare (cmake or default) depending on inherited
# eclasses. Make sure that PATCHES and user patches are applied in top
# ${WORKDIR}, so that patches straight from llvm-project repository
# work correctly with -p1.
llvm.org_src_prepare() {
if declare -f cmake_src_prepare >/dev/null; then
# cmake eclasses force ${S} for default_src_prepare
# but use ${CMAKE_USE_DIR} for everything else
CMAKE_USE_DIR=${S} \
S=${WORKDIR} \
cmake_src_prepare
else
pushd "${WORKDIR}" >/dev/null || die
default_src_prepare
popd >/dev/null || die
fi
}
# == helper functions ==
# @ECLASS-VARIABLE: LIT_JOBS
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# Number of test jobs to run simultaneously. If unset, defaults
# to '-j' in MAKEOPTS. If that is not found, default to nproc.
# @FUNCTION: get_lit_flags
# @DESCRIPTION:
# Get the standard recommended lit flags for running tests, in CMake
# list form (;-separated).
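#
# Example (illustrative):
# @CODE
# local mycmakeargs=(
# -DLLVM_LIT_ARGS="$(get_lit_flags)"
# )
# @CODE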
get_lit_flags() {
echo "-vv;-j;${LIT_JOBS:-$(makeopts_jobs "${MAKEOPTS}" "$(get_nproc)")}"
}
# @FUNCTION: llvm_are_manpages_built
# @DESCRIPTION:
# Return true (0) if manpages are going to be built from source,
# false (1) if preinstalled manpages will be used.
llvm_are_manpages_built() {
use doc || [[ ${LLVM_MANPAGES} == build ]]
}
# @FUNCTION: llvm_install_manpages
# @DESCRIPTION:
# Install pregenerated manpages if available. No-op otherwise.
llvm_install_manpages() {
# install pre-generated manpages
if ! llvm_are_manpages_built; then
# (doman does not support custom paths)
insinto "/usr/lib/llvm/${SLOT}/share/man/man1"
doins "${WORKDIR}/llvm-${PV}-manpages/${LLVM_COMPONENTS[0]}"/*.1
fi
}

176
eclass/ltprune.eclass Normal file
View File

@ -0,0 +1,176 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: ltprune.eclass
# @MAINTAINER:
# Michał Górny <mgorny@gentoo.org>
# @SUPPORTED_EAPIS: 0 1 2 3 4 5 6
# @BLURB: Smart .la file pruning
# @DEPRECATED: none
# @DESCRIPTION:
# A function to locate and remove unnecessary .la files.
#
# Discouraged. Whenever possible, please use the much simpler:
# @CODE
# find "${ED}" -name '*.la' -delete || die
# @CODE
if [[ -z ${_LTPRUNE_ECLASS} ]]; then
case ${EAPI:-0} in
0|1|2|3|4|5|6)
;;
*)
die "${ECLASS}: banned in EAPI=${EAPI}; use 'find' instead";;
esac
inherit toolchain-funcs
# @FUNCTION: prune_libtool_files
# @USAGE: [--all|--modules]
# @DESCRIPTION:
# Locate unnecessary libtool files (.la) and libtool static archives
# (.a) and remove them from installation image.
#
# By default, .la files are removed whenever the static linkage can
# either be performed using pkg-config or doesn't introduce additional
# flags.
#
# If '--modules' argument is passed, .la files for modules (plugins) are
# removed as well. This is usually useful when the package installs
# plugins and the plugin loader does not use .la files.
#
# If '--all' argument is passed, all .la files are removed without
# performing any heuristic on them. You shouldn't ever use that;
# report a bug in the algorithm instead.
#
# The .a files are only removed whenever corresponding .la files state
# that they should not be linked to, i.e. whenever these files
# correspond to plugins.
#
# Note: if your package installs both static libraries and .pc files
# which use variable substitution for -l flags, you need to add
# pkg-config to your DEPEND.
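#
# Example (illustrative):
# @CODE
# src_install() {
# default
# prune_libtool_files --modules
# }
# @CODE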
prune_libtool_files() {
debug-print-function ${FUNCNAME} "$@"
local removing_all removing_modules opt
for opt; do
case "${opt}" in
--all)
removing_all=1
removing_modules=1
;;
--modules)
removing_modules=1
;;
*)
die "Invalid argument to ${FUNCNAME}(): ${opt}"
esac
done
local f
local queue=()
while IFS= read -r -d '' f; do # for all .la files
local archivefile=${f/%.la/.a}
# The following check is done by libtool itself.
# It helps us avoid removing random files which match '*.la',
# see bug #468380.
if ! sed -n -e '/^# Generated by .*libtool/q0;4q1' "${f}"; then
continue
fi
[[ ${f} != ${archivefile} ]] || die 'regex sanity check failed'
local reason= pkgconfig_scanned=
local snotlink=$(sed -n -e 's:^shouldnotlink=::p' "${f}")
if [[ ${snotlink} == yes ]]; then
# Remove static libs we're not supposed to link against.
if [[ -f ${archivefile} ]]; then
einfo "Removing unnecessary ${archivefile#${D%/}} (static plugin)"
queue+=( "${archivefile}" )
fi
# The .la file may be used by a module loader, so avoid removing it
# unless explicitly requested.
if [[ ${removing_modules} ]]; then
reason='module'
fi
else
# Remove .la files when:
# - user explicitly wants us to remove all .la files,
# - respective static archive doesn't exist,
# - they are covered by a .pc file already,
# - they don't provide any new information (no libs & no flags).
if [[ ${removing_all} ]]; then
reason='requested'
elif [[ ! -f ${archivefile} ]]; then
reason='no static archive'
elif [[ ! $(sed -nre \
"s/^(dependency_libs|inherited_linker_flags)='(.*)'$/\2/p" \
"${f}") ]]; then
reason='no libs & flags'
else
if [[ ! ${pkgconfig_scanned} ]]; then
# Create a list of all .pc-covered libs.
local pc_libs=()
if [[ ! ${removing_all} ]]; then
local pc
local tf=${T}/prune-lt-files.pc
local pkgconf=$(tc-getPKG_CONFIG)
while IFS= read -r -d '' pc; do # for all .pc files
local arg libs
# Use pkg-config if available (and works),
# fallback to sed.
if ${pkgconf} --exists "${pc}" &>/dev/null; then
sed -e '/^Requires:/d' "${pc}" > "${tf}"
libs=$(${pkgconf} --libs "${tf}")
else
libs=$(sed -ne 's/^Libs://p' "${pc}")
fi
for arg in ${libs}; do
if [[ ${arg} == -l* ]]; then
if [[ ${arg} == '*$*' ]]; then
eerror "${FUNCNAME}: variable substitution likely failed in ${pc}"
eerror "(arg: ${arg})"
eerror "Most likely, you need to add virtual/pkgconfig to DEPEND."
die "${FUNCNAME}: unsubstituted variable found in .pc"
fi
pc_libs+=( lib${arg#-l}.la )
fi
done
done < <(find "${D}" -type f -name '*.pc' -print0)
rm -f "${tf}"
fi
pkgconfig_scanned=1
fi # pkgconfig_scanned
has "${f##*/}" "${pc_libs[@]}" && reason='covered by .pc'
fi # removal due to .pc
fi # shouldnotlink==no
if [[ ${reason} ]]; then
einfo "Removing unnecessary ${f#${D%/}} (${reason})"
queue+=( "${f}" )
fi
done < <(find "${D}" -xtype f -name '*.la' -print0)
if [[ ${queue[@]} ]]; then
rm -f "${queue[@]}"
fi
}
_LTPRUNE_ECLASS=1
fi #_LTPRUNE_ECLASS

540
eclass/lua-single.eclass Normal file
View File

@ -0,0 +1,540 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: lua-single.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# Marek Szuba <marecki@gentoo.org>
# @AUTHOR:
# Marek Szuba <marecki@gentoo.org>
# Based on python-single-r1.eclass by Michał Górny <mgorny@gentoo.org> et al.
# @SUPPORTED_EAPIS: 7
# @BLURB: An eclass for Lua packages not installed for multiple implementations.
# @DESCRIPTION:
# An extension of lua.eclass suite for packages which don't support being
# installed for multiple Lua implementations. This mostly includes software
# embedding Lua.
#
# This eclass sets correct IUSE. It also provides LUA_DEPS
# and LUA_REQUIRED_USE that need to be added to appropriate ebuild
# metadata variables.
#
# The eclass exports LUA_SINGLE_USEDEP that is suitable for depending
# on other packages using the eclass. Dependencies on packages using
# lua.eclass should be created via lua_gen_cond_dep() function, using
# LUA_USEDEP placeholder.
#
# Please note that packages supporting multiple Lua implementations
# (using lua.eclass) cannot depend on packages not supporting
# them (using this eclass).
#
# Note that since this eclass always inherits lua-utils as well, in ebuilds
# using the former there is no need to explicitly inherit the latter in order
# to use helper functions such as lua_get_CFLAGS.
#
# @EXAMPLE:
# @CODE
# EAPI=7
#
# LUA_COMPAT=( lua5-{1..3} )
#
# inherit lua-single
#
# [...]
#
# REQUIRED_USE="${LUA_REQUIRED_USE}"
# DEPEND="${LUA_DEPS}"
# RDEPEND="${DEPEND}
# $(lua_gen_cond_dep '
# dev-lua/foo[${LUA_USEDEP}]
# ')
# "
# BDEPEND="virtual/pkgconfig"
#
# # Only needed if the setup phase has to do more than just call lua-single_pkg_setup
# pkg_setup() {
# lua-single_pkg_setup
# [...]
# }
#
# src_install() {
# emake LUA_VERSION="$(lua_get_version)" install
# }
# @CODE
case ${EAPI:-0} in
0|1|2|3|4|5|6)
die "Unsupported EAPI=${EAPI} (too old) for ${ECLASS}"
;;
7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
if [[ ! ${_LUA_SINGLE_R0} ]]; then
if [[ ${_LUA_R0} ]]; then
die 'lua-single.eclass cannot be used with lua.eclass.'
fi
inherit lua-utils
fi
EXPORT_FUNCTIONS pkg_setup
# @ECLASS-VARIABLE: LUA_COMPAT
# @REQUIRED
# @PRE_INHERIT
# @DESCRIPTION:
# This variable contains a list of Lua implementations the package
# supports. It must be set before the `inherit' call. It has to be
# an array.
#
# Example:
# @CODE
# LUA_COMPAT=( lua5-1 lua5-2 lua5-3 )
# @CODE
#
# Please note that you can also use bash brace expansion if you like:
# @CODE
# LUA_COMPAT=( lua5-{1..3} )
# @CODE
# @ECLASS-VARIABLE: LUA_COMPAT_OVERRIDE
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# This variable can be used when working with ebuilds to override
# the in-ebuild LUA_COMPAT. It is a string listing all
# the implementations which the package will be built for. It needs to be
# specified in the calling environment, and not in ebuilds.
#
# It should be noted that in order to preserve metadata immutability,
# LUA_COMPAT_OVERRIDE does not affect IUSE nor dependencies.
# The state of LUA_TARGETS is ignored, and all the implementations
# in LUA_COMPAT_OVERRIDE are built. Dependencies need to be satisfied
# manually.
#
# Example:
# @CODE
# LUA_COMPAT_OVERRIDE='lua5-2' emerge -1v dev-lua/foo
# @CODE
# @ECLASS-VARIABLE: LUA_REQ_USE
# @DEFAULT_UNSET
# @PRE_INHERIT
# @DESCRIPTION:
# The list of USE flags required to be enabled on the chosen Lua
# implementations, formed as a USE-dependency string. It should be valid
# for all implementations in LUA_COMPAT, so it may be necessary to
# use USE defaults.
# This must be set before calling `inherit'.
#
# Example:
# @CODE
# LUA_REQ_USE="deprecated"
# @CODE
#
# It will cause the Lua dependencies to look like:
# @CODE
# lua_targets_luaX-Y? ( dev-lang/lua:X.Y[deprecated] )
# @CODE
# @ECLASS-VARIABLE: LUA_DEPS
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# This is an eclass-generated Lua dependency string for all
# implementations listed in LUA_COMPAT.
#
# Example use:
# @CODE
# RDEPEND="${LUA_DEPS}
# dev-foo/mydep"
# DEPEND="${RDEPEND}"
# @CODE
#
# Example value:
# @CODE
# lua_targets_lua5-1? ( dev-lang/lua:5.1 )
# lua_targets_lua5-2? ( dev-lang/lua:5.2 )
# @CODE
# @ECLASS-VARIABLE: LUA_REQUIRED_USE
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# This is an eclass-generated required-use expression which ensures at
# least one Lua implementation has been enabled.
#
# This expression should be utilized in an ebuild by including it in
# REQUIRED_USE, optionally behind a use flag.
#
# Example use:
# @CODE
# REQUIRED_USE="lua? ( ${LUA_REQUIRED_USE} )"
# @CODE
#
# Example value:
# @CODE
# || ( lua_targets_lua5-1 lua_targets_lua5-2 )
# @CODE
# @ECLASS-VARIABLE: LUA_SINGLE_USEDEP
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# This is an eclass-generated USE-dependency string which can be used
# to depend on another lua-single package being built for the same
# Lua implementations.
#
# If you need to depend on a multi-impl (lua.eclass) package, use
# lua_gen_cond_dep with LUA_USEDEP placeholder instead.
#
# Example use:
# @CODE
# RDEPEND="dev-lua/foo[${LUA_SINGLE_USEDEP}]"
# @CODE
#
# Example value:
# @CODE
# lua_single_target_lua5-1(-)?
# @CODE
# @ECLASS-VARIABLE: LUA_USEDEP
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# This is an eclass-generated USE-dependency string which can be used to
# depend on another Lua package being built for the same Lua
# implementations.
#
# Example use:
# @CODE
# RDEPEND="dev-lua/foo[${LUA_USEDEP}]"
# @CODE
#
# Example value:
# @CODE
# lua_targets_lua5-1(-)?,lua_targets_lua5-2(-)?
# @CODE
# @FUNCTION: _lua_single_set_globals
# @INTERNAL
# @DESCRIPTION:
# Sets all the global output variables provided by this eclass.
# This function must be called once, in global scope.
_lua_single_set_globals() {
_lua_set_impls
local flags=( "${_LUA_SUPPORTED_IMPLS[@]/#/lua_single_target_}" )
if [[ ${#_LUA_SUPPORTED_IMPLS[@]} -eq 1 ]]; then
# if only one implementation is supported, use IUSE defaults
# to avoid requesting the user to enable it
IUSE="+${flags[0]}"
else
IUSE="${flags[*]}"
fi
local requse="^^ ( ${flags[*]} )"
local single_flags="${flags[@]/%/(-)?}"
local single_usedep=${single_flags// /,}
local deps= i LUA_PKG_DEP
for i in "${_LUA_SUPPORTED_IMPLS[@]}"; do
_lua_export "${i}" LUA_PKG_DEP
deps+="lua_single_target_${i}? ( ${LUA_PKG_DEP} ) "
done
if [[ ${LUA_DEPS+1} ]]; then
if [[ ${LUA_DEPS} != "${deps}" ]]; then
eerror "LUA_DEPS have changed between inherits (LUA_REQ_USE?)!"
eerror "Before: ${LUA_DEPS}"
eerror "Now : ${deps}"
die "LUA_DEPS integrity check failed"
fi
# these two are a formality -- they depend on LUA_COMPAT only
if [[ ${LUA_REQUIRED_USE} != ${requse} ]]; then
eerror "LUA_REQUIRED_USE have changed between inherits!"
eerror "Before: ${LUA_REQUIRED_USE}"
eerror "Now : ${requse}"
die "LUA_REQUIRED_USE integrity check failed"
fi
if [[ ${LUA_SINGLE_USEDEP} != "${single_usedep}" ]]; then
eerror "LUA_SINGLE_USEDEP have changed between inherits!"
eerror "Before: ${LUA_SINGLE_USEDEP}"
eerror "Now : ${single_usedep}"
die "LUA_SINGLE_USEDEP integrity check failed"
fi
else
LUA_DEPS=${deps}
LUA_REQUIRED_USE=${requse}
LUA_SINGLE_USEDEP=${single_usedep}
LUA_USEDEP='%LUA_USEDEP-NEEDS-TO-BE-USED-IN-LUA_GEN_COND_DEP%'
readonly LUA_DEPS LUA_REQUIRED_USE LUA_SINGLE_USEDEP LUA_USEDEP
fi
}
_lua_single_set_globals
unset -f _lua_single_set_globals
if [[ ! ${_LUA_SINGLE_R0} ]]; then
# @FUNCTION: _lua_gen_usedep
# @USAGE: [<pattern>...]
# @INTERNAL
# @DESCRIPTION:
# Output a USE dependency string for Lua implementations which
# are both in LUA_COMPAT and match any of the patterns passed
# as parameters to the function.
#
# The patterns can be fnmatch-style patterns (matched via bash == operator
# against LUA_COMPAT values). Remember to escape or quote the fnmatch
# patterns to prevent accidental shell filename expansion.
#
# This is an internal function used to implement lua_gen_cond_dep.
_lua_gen_usedep() {
debug-print-function ${FUNCNAME} "${@}"
local impl matches=()
_lua_verify_patterns "${@}"
for impl in "${_LUA_SUPPORTED_IMPLS[@]}"; do
if _lua_impl_matches "${impl}" "${@}"; then
matches+=(
"lua_single_target_${impl}(-)?"
)
fi
done
[[ ${matches[@]} ]] || die "No supported implementations match lua_gen_usedep patterns: ${@}"
local out=${matches[@]}
echo "${out// /,}"
}
# @FUNCTION: _lua_impl_matches
# @USAGE: <impl> [<pattern>...]
# @INTERNAL
# @DESCRIPTION:
# Check whether the specified <impl> matches at least one
# of the patterns following it. Return 0 if it does, 1 otherwise.
# Matches if no patterns are provided.
#
# <impl> can be in LUA_COMPAT or ELUA form. The patterns can be
# fnmatch-style patterns, e.g. 'lua5*'.
_lua_impl_matches() {
[[ ${#} -ge 1 ]] || die "${FUNCNAME}: takes at least 1 parameter"
[[ ${#} -eq 1 ]] && return 0
local impl=${1} pattern
shift
for pattern; do
# unify value style to allow lax matching
if [[ ${impl/./-} == ${pattern/./-} ]]; then
return 0
fi
done
return 1
}
# @FUNCTION: _lua_verify_patterns
# @USAGE: <pattern>...
# @INTERNAL
# @DESCRIPTION:
# Verify whether the patterns passed to the eclass function are correct
# (i.e. can match any valid implementation). Dies on wrong pattern.
_lua_verify_patterns() {
debug-print-function ${FUNCNAME} "${@}"
local impl pattern
for pattern; do
for impl in "${_LUA_ALL_IMPLS[@]}"; do
[[ ${impl} == ${pattern/./-} ]] && continue 2
done
die "Invalid implementation pattern: ${pattern}"
done
}
# @FUNCTION: lua_gen_cond_dep
# @USAGE: <dependency> [<pattern>...]
# @DESCRIPTION:
# Output a list of <dependency>-ies made conditional to USE flags
# of Lua implementations which are both in LUA_COMPAT and match
# any of the patterns passed as the remaining parameters.
#
# The patterns can be fnmatch-style patterns (matched via bash == operator
# against LUA_COMPAT values). Remember to escape or quote the fnmatch
# patterns to prevent accidental shell filename expansion.
#
# In order to enforce USE constraints on the packages, verbatim
# '${LUA_SINGLE_USEDEP}' and '${LUA_USEDEP}' (quoted!) may
# be placed in the dependency specification. It will get expanded within
# the function into a proper USE dependency string.
#
# Example:
# @CODE
# LUA_COMPAT=( lua5-{1..3} )
# RDEPEND="$(lua_gen_cond_dep \
# 'dev-lua/backported_core_module[${LUA_USEDEP}]' lua5-1 lua5-2 )"
# @CODE
#
# It will cause the variable to look like:
# @CODE
# RDEPEND="lua_single_target_lua5-1? (
# dev-lua/backported_core_module[lua_targets_lua5-1(-)?,...] )
# lua_single_target_lua5-2? (
# dev-lua/backported_core_module[lua_targets_lua5-2(-)?,...] )"
# @CODE
lua_gen_cond_dep() {
debug-print-function ${FUNCNAME} "${@}"
local impl matches=()
local dep=${1}
shift
_lua_verify_patterns "${@}"
for impl in "${_LUA_SUPPORTED_IMPLS[@]}"; do
if _lua_impl_matches "${impl}" "${@}"; then
# substitute ${LUA_SINGLE_USEDEP} if used
# (since lua_gen_usedep() will not return
# ${LUA_SINGLE_USEDEP}, the code is run at most once)
if [[ ${dep} == *'${LUA_SINGLE_USEDEP}'* ]]; then
local usedep=$(_lua_gen_usedep "${@}")
dep=${dep//\$\{LUA_SINGLE_USEDEP\}/${usedep}}
fi
local multi_usedep="lua_targets_${impl}(-)"
local subdep=${dep//\$\{LUA_MULTI_USEDEP\}/${multi_usedep}}
matches+=( "lua_single_target_${impl}? (
${subdep//\$\{LUA_USEDEP\}/${multi_usedep}} )" )
fi
done
echo "${matches[@]}"
}
# @FUNCTION: lua_gen_impl_dep
# @USAGE: [<requested-use-flags> [<impl-pattern>...]]
# @DESCRIPTION:
# Output a dependency on Lua implementations with the specified USE
# dependency string appended, or no USE dependency string if called
# without the argument (or with empty argument). If any implementation
# patterns are passed, the output dependencies will be generated only
# for the implementations matching them.
#
# The patterns can be fnmatch-style patterns (matched via bash == operator
# against LUA_COMPAT values). Remember to escape or quote the fnmatch
# patterns to prevent accidental shell filename expansion.
#
# Use this function when you need to request different USE flags
# on the Lua interpreter depending on package's USE flags. If you
# only need a single set of interpreter USE flags, just set
# LUA_REQ_USE and use ${LUA_DEPS} globally.
#
# Example:
# @CODE
# LUA_COMPAT=( lua5-{1..3} )
# RDEPEND="foo? ( $(lua_gen_impl_dep 'deprecated(+)' lua5-3 ) )"
# @CODE
#
# It will cause the variable to look like:
# @CODE
# RDEPEND="foo? (
# lua_single_target_lua5-3? ( dev-lang/lua:5.3[deprecated(+)] )
# )"
# @CODE
lua_gen_impl_dep() {
debug-print-function ${FUNCNAME} "${@}"
local impl
local matches=()
local LUA_REQ_USE=${1}
shift
_lua_verify_patterns "${@}"
for impl in "${_LUA_SUPPORTED_IMPLS[@]}"; do
if _lua_impl_matches "${impl}" "${@}"; then
local LUA_PKG_DEP
_lua_export "${impl}" LUA_PKG_DEP
matches+=( "lua_single_target_${impl}? ( ${LUA_PKG_DEP} )" )
fi
done
echo "${matches[@]}"
}
# @FUNCTION: lua_setup
# @DESCRIPTION:
# Determine what the selected Lua implementation is and set
# the Lua build environment up for it.
lua_setup() {
debug-print-function ${FUNCNAME} "${@}"
unset ELUA
# support developer override
if [[ ${LUA_COMPAT_OVERRIDE} ]]; then
local impls=( ${LUA_COMPAT_OVERRIDE} )
[[ ${#impls[@]} -eq 1 ]] || die "LUA_COMPAT_OVERRIDE must name exactly one implementation for lua-single"
ewarn "WARNING: LUA_COMPAT_OVERRIDE in effect. The following Lua"
ewarn "implementation will be used:"
ewarn
ewarn " ${LUA_COMPAT_OVERRIDE}"
ewarn
ewarn "Dependencies won't be satisfied, and LUA_SINGLE_TARGET flags will be ignored."
_lua_export "${impls[0]}" ELUA LUA
_lua_wrapper_setup
einfo "Using ${ELUA} to build"
return
fi
local impl
for impl in "${_LUA_SUPPORTED_IMPLS[@]}"; do
if use "lua_single_target_${impl}"; then
if [[ ${ELUA} ]]; then
eerror "Your LUA_SINGLE_TARGET setting lists more than a single Lua"
eerror "implementation. Please set it to just one value. If you need"
eerror "to override the value for a single package, please use package.env"
eerror "or an equivalent solution (man 5 portage)."
echo
die "More than one implementation in LUA_SINGLE_TARGET."
fi
_lua_export "${impl}" ELUA LUA
_lua_wrapper_setup
einfo "Using ${ELUA} to build"
fi
done
if [[ ! ${ELUA} ]]; then
eerror "No Lua implementation selected for the build. Please set"
eerror "the LUA_SINGLE_TARGET variable in your make.conf to one"
eerror "of the following values:"
eerror
eerror "${_LUA_SUPPORTED_IMPLS[@]}"
echo
die "No supported Lua implementation in LUA_SINGLE_TARGET."
fi
}
# @FUNCTION: lua-single_pkg_setup
# @DESCRIPTION:
# Runs lua_setup.
lua-single_pkg_setup() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${MERGE_TYPE} != binary ]] && lua_setup
}
_LUA_SINGLE_R0=1
fi

532
eclass/lua-utils.eclass Normal file
View File

@ -0,0 +1,532 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: lua-utils.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# Marek Szuba <marecki@gentoo.org>
# @AUTHOR:
# Marek Szuba <marecki@gentoo.org>
# Based on python-utils-r1.eclass by Michał Górny <mgorny@gentoo.org> et al.
# @SUPPORTED_EAPIS: 7
# @BLURB: Utility functions for packages with Lua parts
# @DESCRIPTION:
# A utility eclass providing functions to query Lua implementations,
# install Lua modules and scripts.
#
# This eclass neither sets any metadata variables nor exports any phase
# functions. It can be inherited safely.
case ${EAPI:-0} in
0|1|2|3|4|5|6)
die "Unsupported EAPI=${EAPI} (too old) for ${ECLASS}"
;;
7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
if [[ ! ${_LUA_UTILS_R0} ]]; then
inherit toolchain-funcs
# @ECLASS-VARIABLE: _LUA_ALL_IMPLS
# @INTERNAL
# @DESCRIPTION:
# All supported Lua implementations, most preferred last
_LUA_ALL_IMPLS=(
luajit
lua5-1
lua5-2
lua5-3
lua5-4
)
readonly _LUA_ALL_IMPLS
# @FUNCTION: _lua_set_impls
# @INTERNAL
# @DESCRIPTION:
# Check LUA_COMPAT for well-formedness and validity, then set
# two global variables:
#
# - _LUA_SUPPORTED_IMPLS containing valid implementations supported
# by the ebuild (LUA_COMPAT minus dead implementations),
#
# - and _LUA_UNSUPPORTED_IMPLS containing valid implementations that
# are not supported by the ebuild.
#
# Implementations in both variables are ordered using the pre-defined
# eclass implementation ordering.
#
# This function must only be called once.
_lua_set_impls() {
local i
if ! declare -p LUA_COMPAT &>/dev/null; then
die 'LUA_COMPAT not declared.'
fi
if [[ $(declare -p LUA_COMPAT) != "declare -a"* ]]; then
die 'LUA_COMPAT must be an array.'
fi
local supp=() unsupp=()
for i in "${_LUA_ALL_IMPLS[@]}"; do
if has "${i}" "${LUA_COMPAT[@]}"; then
supp+=( "${i}" )
else
unsupp+=( "${i}" )
fi
done
if [[ ! ${supp[@]} ]]; then
die "No supported implementation in LUA_COMPAT."
fi
if [[ ${_LUA_SUPPORTED_IMPLS[@]} ]]; then
# set once already, verify integrity
if [[ ${_LUA_SUPPORTED_IMPLS[@]} != ${supp[@]} ]]; then
eerror "Supported impls (LUA_COMPAT) changed between inherits!"
eerror "Before: ${_LUA_SUPPORTED_IMPLS[*]}"
eerror "Now : ${supp[*]}"
die "_LUA_SUPPORTED_IMPLS integrity check failed"
fi
if [[ ${_LUA_UNSUPPORTED_IMPLS[@]} != ${unsupp[@]} ]]; then
eerror "Unsupported impls changed between inherits!"
eerror "Before: ${_LUA_UNSUPPORTED_IMPLS[*]}"
eerror "Now : ${unsupp[*]}"
die "_LUA_UNSUPPORTED_IMPLS integrity check failed"
fi
else
_LUA_SUPPORTED_IMPLS=( "${supp[@]}" )
_LUA_UNSUPPORTED_IMPLS=( "${unsupp[@]}" )
readonly _LUA_SUPPORTED_IMPLS _LUA_UNSUPPORTED_IMPLS
fi
}
# @FUNCTION: _lua_wrapper_setup
# @USAGE: [<path> [<impl>]]
# @INTERNAL
# @DESCRIPTION:
# Create proper Lua executables and pkg-config wrappers
# (if available) in the directory named by <path>. Set up PATH
# and PKG_CONFIG_PATH appropriately. <path> defaults to ${T}/${ELUA}.
#
# The wrappers will be created for implementation named by <impl>,
# or for one named by ${ELUA} if no <impl> passed.
#
# If the named directory already contains a lua symlink, it will
# be assumed to contain proper wrappers and only the environment
# setup will be done. If a wrapper update is requested, the directory
# shall be removed first.
_lua_wrapper_setup() {
debug-print-function ${FUNCNAME} "${@}"
local workdir=${1:-${T}/${ELUA}}
local impl=${2:-${ELUA}}
[[ ${workdir} ]] || die "${FUNCNAME}: no workdir specified."
[[ ${impl} ]] || die "${FUNCNAME}: no impl nor ELUA specified."
if [[ ! -x ${workdir}/bin/lua ]]; then
mkdir -p "${workdir}"/{bin,pkgconfig} || die
# Clean up, in case we were supposed to do a cheap update
rm -f "${workdir}"/bin/lua{,c} || die
rm -f "${workdir}"/pkgconfig/lua.pc || die
local ELUA LUA
_lua_export "${impl}" ELUA LUA
# Lua interpreter
ln -s "${EPREFIX}"/usr/bin/${ELUA} "${workdir}"/bin/lua || die
# Lua compiler, or a stub for it in case of luajit
if [[ ${ELUA} == luajit ]]; then
# Just in case
ln -s "${EPREFIX}"/bin/true "${workdir}"/bin/luac || die
else
ln -s "${EPREFIX}"/usr/bin/${ELUA/a/ac} "${workdir}"/bin/luac || die
fi
# pkg-config
ln -s "${EPREFIX}"/usr/$(get_libdir)/pkgconfig/${ELUA}.pc \
"${workdir}"/pkgconfig/lua.pc || die
fi
# Now, set the environment.
# But note that ${workdir} may be shared with something else,
# and thus already on top of PATH.
if [[ ${PATH##:*} != ${workdir}/bin ]]; then
PATH=${workdir}/bin${PATH:+:${PATH}}
fi
if [[ ${PKG_CONFIG_PATH##:*} != ${workdir}/pkgconfig ]]; then
PKG_CONFIG_PATH=${workdir}/pkgconfig${PKG_CONFIG_PATH:+:${PKG_CONFIG_PATH}}
fi
export PATH PKG_CONFIG_PATH
}
# @ECLASS-VARIABLE: ELUA
# @DEFAULT_UNSET
# @DESCRIPTION:
# The executable name of the current Lua interpreter. This variable is set
# automatically in functions called by lua_foreach_impl().
#
# Example value:
# @CODE
# lua5.1
# @CODE
# @ECLASS-VARIABLE: LUA
# @DEFAULT_UNSET
# @DESCRIPTION:
# The absolute path to the current Lua interpreter. This variable is set
# automatically in functions called by lua_foreach_impl().
#
# Example value:
# @CODE
# /usr/bin/lua5.1
# @CODE
# @FUNCTION: _lua_get_library_file
# @USAGE: <impl>
# @INTERNAL
# @DESCRIPTION:
# Get the core part (i.e. without the extension) of the library name,
# with path, of the given Lua implementation.
# Used internally by _lua_export().
_lua_get_library_file() {
local impl="${1}"
local libdir libname
case ${impl} in
luajit)
libname=lib$($(tc-getPKG_CONFIG) --variable libname ${impl}) || die
;;
lua*)
libname=lib${impl}
;;
*)
die "Invalid implementation: ${impl}"
;;
esac
libdir=$($(tc-getPKG_CONFIG) --variable libdir ${impl}) || die
debug-print "${FUNCNAME}: libdir = ${libdir}, libname = ${libname}"
echo "${libdir}/${libname}"
}
# @FUNCTION: _lua_export
# @USAGE: [<impl>] <variables>...
# @INTERNAL
# @DESCRIPTION:
# Set and export the Lua implementation-relevant variables passed
# as parameters.
#
# The optional first parameter may specify the requested Lua
# implementation (either as LUA_TARGETS value, e.g. lua5-2,
# or an ELUA one, e.g. lua5.2). If no implementation passed,
# the current one will be obtained from ${ELUA}.
_lua_export() {
debug-print-function ${FUNCNAME} "${@}"
local impl var
case "${1}" in
luajit)
impl=${1}
shift
;;
lua*)
impl=${1/-/.}
shift
;;
*)
impl=${ELUA}
if [[ -z ${impl} ]]; then
die "_lua_export called without a Lua implementation and ELUA is unset"
fi
;;
esac
debug-print "${FUNCNAME}: implementation: ${impl}"
for var; do
case "${var}" in
ELUA)
export ELUA=${impl}
debug-print "${FUNCNAME}: ELUA = ${ELUA}"
;;
LUA)
export LUA="${EPREFIX}"/usr/bin/${impl}
debug-print "${FUNCNAME}: LUA = ${LUA}"
;;
LUA_CFLAGS)
local val
val=$($(tc-getPKG_CONFIG) --cflags ${impl}) || die
export LUA_CFLAGS=${val}
debug-print "${FUNCNAME}: LUA_CFLAGS = ${LUA_CFLAGS}"
;;
LUA_CMOD_DIR)
local val
val=$($(tc-getPKG_CONFIG) --variable INSTALL_CMOD ${impl}) || die
export LUA_CMOD_DIR=${val}
debug-print "${FUNCNAME}: LUA_CMOD_DIR = ${LUA_CMOD_DIR}"
;;
LUA_INCLUDE_DIR)
local val
val=$($(tc-getPKG_CONFIG) --variable includedir ${impl}) || die
export LUA_INCLUDE_DIR=${val}
debug-print "${FUNCNAME}: LUA_INCLUDE_DIR = ${LUA_INCLUDE_DIR}"
;;
LUA_LIBS)
local val
val=$($(tc-getPKG_CONFIG) --libs ${impl}) || die
export LUA_LIBS=${val}
debug-print "${FUNCNAME}: LUA_LIBS = ${LUA_LIBS}"
;;
LUA_LMOD_DIR)
local val
val=$($(tc-getPKG_CONFIG) --variable INSTALL_LMOD ${impl}) || die
export LUA_LMOD_DIR=${val}
debug-print "${FUNCNAME}: LUA_LMOD_DIR = ${LUA_LMOD_DIR}"
;;
LUA_PKG_DEP)
local d
case ${impl} in
luajit)
LUA_PKG_DEP="dev-lang/luajit:="
;;
lua*)
LUA_PKG_DEP="dev-lang/lua:${impl#lua}"
;;
*)
die "Invalid implementation: ${impl}"
;;
esac
# use-dep
if [[ ${LUA_REQ_USE} ]]; then
LUA_PKG_DEP+=[${LUA_REQ_USE}]
fi
export LUA_PKG_DEP
debug-print "${FUNCNAME}: LUA_PKG_DEP = ${LUA_PKG_DEP}"
;;
LUA_SHARED_LIB)
local val=$(_lua_get_library_file ${impl})
export LUA_SHARED_LIB="${val}".so
debug-print "${FUNCNAME}: LUA_SHARED_LIB = ${LUA_SHARED_LIB}"
;;
LUA_VERSION)
local val
val=$($(tc-getPKG_CONFIG) --modversion ${impl}) || die
export LUA_VERSION=${val}
debug-print "${FUNCNAME}: LUA_VERSION = ${LUA_VERSION}"
;;
*)
die "_lua_export: unknown variable ${var}"
;;
esac
done
}
# @FUNCTION: lua_enable_tests
# @USAGE: <test-runner> <test-directory>
# @DESCRIPTION:
# Set up IUSE, RESTRICT, BDEPEND and src_test() for running tests
# with the specified test runner. Also copies the current value
# of RDEPEND to the test?-conditional BDEPEND. The test-runner argument must be one of:
#
# - busted: dev-lua/busted
#
# Additionally, a second argument can be passed after <test-runner>,
# so <test-runner> will use that directory to search for tests.
# If not passed, a runner-specific default directory will be used:
#
# - busted: spec
#
# This function is meant as a helper for common use cases, and it only
# takes care of basic setup. You still need to list additional test
# dependencies manually. If you have uncommon use case, you should
# not use it and instead enable tests manually.
#
# This function must be called in global scope, after RDEPEND has been
# declared. Take care not to overwrite the variables set by it.
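#
# Example (illustrative):
# @CODE
# RDEPEND="dev-lua/foo[${LUA_USEDEP}]"
#
# lua_enable_tests busted spec
# @CODE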
lua_enable_tests() {
debug-print-function ${FUNCNAME} "${@}"
[[ ${#} -ge 1 ]] || die "${FUNCNAME} takes at least one argument: test-runner (test-directory)"
local test_directory
local test_pkg
case ${1} in
busted)
test_directory="${2:-spec}"
test_pkg="dev-lua/busted"
if [[ ! ${_LUA_SINGLE_R0} ]]; then
eval "lua_src_test() {
busted --lua=\"\${ELUA}\" --output=\"plainTerminal\" \"${test_directory}\" || die \"Tests fail with \${ELUA}\"
}"
src_test() {
lua_foreach_impl lua_src_test
}
else
eval "src_test() {
busted --lua=\"\${ELUA}\" --output=\"plainTerminal\" \"${test_directory}\" || die \"Tests fail with \${ELUA}\"
}"
fi
;;
*)
die "${FUNCNAME}: unsupported argument: ${1}"
esac
local test_deps=${RDEPEND}
if [[ -n ${test_pkg} ]]; then
if [[ ! ${_LUA_SINGLE_R0} ]]; then
test_deps+=" ${test_pkg}[${LUA_USEDEP}]"
else
test_deps+=" $(lua_gen_cond_dep "
${test_pkg}[\${LUA_USEDEP}]
")"
fi
fi
if [[ -n ${test_deps} ]]; then
IUSE+=" test"
RESTRICT+=" !test? ( test )"
BDEPEND+=" test? ( ${test_deps} )"
fi
# we need to ensure successful return in case we're called last,
# otherwise Portage may wrongly assume sourcing failed
return 0
}
# @FUNCTION: lua_get_CFLAGS
# @USAGE: [<impl>]
# @DESCRIPTION:
# Obtain and print the compiler flags for building against Lua,
# for the given implementation. If no implementation is provided,
# ${ELUA} will be used.
#
# Please note that this function requires Lua and pkg-config installed,
# and therefore proper build-time dependencies need be added to the ebuild.
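#
# Example (illustrative):
# @CODE
# src_compile() {
# emake CFLAGS="${CFLAGS} $(lua_get_CFLAGS)"
# }
# @CODE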
lua_get_CFLAGS() {
debug-print-function ${FUNCNAME} "${@}"
_lua_export "${@}" LUA_CFLAGS
echo "${LUA_CFLAGS}"
}
# @FUNCTION: lua_get_cmod_dir
# @USAGE: [<impl>]
# @DESCRIPTION:
# Obtain and print the name of the directory into which compiled Lua
# modules are installed, for the given implementation. If no implementation
# is provided, ${ELUA} will be used.
#
# Please note that this function requires Lua and pkg-config installed,
# and therefore proper build-time dependencies need be added to the ebuild.
lua_get_cmod_dir() {
debug-print-function ${FUNCNAME} "${@}"
_lua_export "${@}" LUA_CMOD_DIR
echo "${LUA_CMOD_DIR}"
}
# @FUNCTION: lua_get_include_dir
# @USAGE: [<impl>]
# @DESCRIPTION:
# Obtain and print the name of the directory containing header files
# of the given Lua implementation. If no implementation is provided,
# ${ELUA} will be used.
#
# Please note that this function requires Lua and pkg-config installed,
# and therefore proper build-time dependencies need be added to the ebuild.
lua_get_include_dir() {
debug-print-function ${FUNCNAME} "${@}"
_lua_export "${@}" LUA_INCLUDE_DIR
echo "${LUA_INCLUDE_DIR}"
}
# @FUNCTION: lua_get_LIBS
# @USAGE: [<impl>]
# @DESCRIPTION:
# Obtain and print the compiler flags for linking against Lua,
# for the given implementation. If no implementation is provided,
# ${ELUA} will be used.
#
# Please note that this function requires Lua and pkg-config installed,
# and therefore proper build-time dependencies need be added to the ebuild.
lua_get_LIBS() {
debug-print-function ${FUNCNAME} "${@}"
_lua_export "${@}" LUA_LIBS
echo "${LUA_LIBS}"
}
# @FUNCTION: lua_get_lmod_dir
# @USAGE: [<impl>]
# @DESCRIPTION:
# Obtain and print the name of the directory into which native-Lua
# modules are installed, for the given implementation. If no implementation
# is provided, ${ELUA} will be used.
#
# Please note that this function requires Lua and pkg-config installed,
# and therefore proper build-time dependencies need be added to the ebuild.
lua_get_lmod_dir() {
debug-print-function ${FUNCNAME} "${@}"
_lua_export "${@}" LUA_LMOD_DIR
echo "${LUA_LMOD_DIR}"
}
# @FUNCTION: lua_get_shared_lib
# @USAGE: [<impl>]
# @DESCRIPTION:
# Obtain and print the expected name, with path, of the main shared library
# of the given Lua implementation. If no implementation is provided,
# ${ELUA} will be used.
#
# Note that it is up to the ebuild maintainer to ensure Lua actually
# provides a shared library.
#
# Please note that this function requires Lua and pkg-config installed,
# and therefore proper build-time dependencies need be added to the ebuild.
lua_get_shared_lib() {
debug-print-function ${FUNCNAME} "${@}"
_lua_export "${@}" LUA_SHARED_LIB
echo "${LUA_SHARED_LIB}"
}
# @FUNCTION: lua_get_version
# @USAGE: [<impl>]
# @DESCRIPTION:
# Obtain and print the full version number of the given Lua implementation.
# If no implementation is provided, ${ELUA} will be used.
#
# Please note that this function requires Lua and pkg-config installed,
# and therefore proper build-time dependencies need be added to the ebuild.
lua_get_version() {
debug-print-function ${FUNCNAME} "${@}"
_lua_export "${@}" LUA_VERSION
echo "${LUA_VERSION}"
}
_LUA_UTILS_R0=1
fi

381
eclass/lua.eclass Normal file
View File

@ -0,0 +1,381 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: lua.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# Marek Szuba <marecki@gentoo.org>
# @AUTHOR:
# Marek Szuba <marecki@gentoo.org>
# Based on python-r1.eclass by Michał Górny <mgorny@gentoo.org> et al.
# @SUPPORTED_EAPIS: 7
# @BLURB: A common eclass for Lua packages
# @DESCRIPTION:
# A common eclass providing helper functions to build and install
# packages supporting being installed for multiple Lua implementations.
#
# This eclass sets correct IUSE. Modification of REQUIRED_USE has to
# be done by the author of the ebuild (but LUA_REQUIRED_USE is
# provided for convenience, see below). The eclass exports LUA_DEPS
# and LUA_USEDEP so you can create correct dependencies for your
# package easily. It also provides methods to easily run a command for
# each enabled Lua implementation and duplicate the sources for them.
#
# Note that since this eclass always inherits lua-utils as well, in ebuilds
# using the former there is no need to explicitly inherit the latter in order
# to use helper functions such as lua_get_CFLAGS.
#
# @EXAMPLE:
# @CODE
# EAPI=7
#
# LUA_COMPAT=( lua5-{1..3} )
#
# inherit lua
#
# [...]
#
# REQUIRED_USE="${LUA_REQUIRED_USE}"
# DEPEND="${LUA_DEPS}"
# RDEPEND="${DEPEND}
# dev-lua/foo[${LUA_USEDEP}]"
# BDEPEND="virtual/pkgconfig"
#
# lua_src_install() {
# emake LUA_VERSION="$(lua_get_version)" install
# }
#
# src_install() {
# lua_foreach_impl lua_src_install
# }
# @CODE
case ${EAPI:-0} in
0|1|2|3|4|5|6)
die "Unsupported EAPI=${EAPI} (too old) for ${ECLASS}"
;;
7)
;;
*)
die "Unsupported EAPI=${EAPI} (unknown) for ${ECLASS}"
;;
esac
if [[ ! ${_LUA_R0} ]]; then
if [[ ${_LUA_SINGLE_R0} ]]; then
die 'lua.eclass cannot be used with lua-single.eclass.'
fi
inherit multibuild lua-utils
fi
# @ECLASS-VARIABLE: LUA_COMPAT
# @REQUIRED
# @PRE_INHERIT
# @DESCRIPTION:
# This variable contains a list of Lua implementations the package
# supports. It must be set before the `inherit' call. It has to be
# an array.
#
# Example:
# @CODE
# LUA_COMPAT=( lua5-1 lua5-2 lua5-3 )
# @CODE
#
# Please note that you can also use bash brace expansion if you like:
# @CODE
# LUA_COMPAT=( lua5-{1..3} )
# @CODE
# @ECLASS-VARIABLE: LUA_COMPAT_OVERRIDE
# @USER_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# This variable can be used when working with ebuilds to override
# the in-ebuild LUA_COMPAT. It is a string listing all
# the implementations for which the package will be built. It must be
# specified in the calling environment, not in ebuilds.
#
# It should be noted that in order to preserve metadata immutability,
# LUA_COMPAT_OVERRIDE does not affect IUSE nor dependencies.
# The state of LUA_TARGETS is ignored, and all the implementations
# in LUA_COMPAT_OVERRIDE are built. Dependencies need to be satisfied
# manually.
#
# Example:
# @CODE
# LUA_COMPAT_OVERRIDE='lua5-2' emerge -1v dev-lua/foo
# @CODE
# @ECLASS-VARIABLE: LUA_REQ_USE
# @DEFAULT_UNSET
# @PRE_INHERIT
# @DESCRIPTION:
# The list of USE flags required to be enabled on the chosen Lua
# implementations, formed as a USE-dependency string. It should be valid
# for all implementations in LUA_COMPAT, so it may be necessary to
# use USE defaults.
# This must be set before calling `inherit'.
#
# Example:
# @CODE
# LUA_REQ_USE="deprecated"
# @CODE
#
# It will cause the Lua dependencies to look like:
# @CODE
# lua_targets_luaX-Y? ( dev-lang/lua:X.Y[deprecated] )
# @CODE
# @ECLASS-VARIABLE: BUILD_DIR
# @OUTPUT_VARIABLE
# @DEFAULT_UNSET
# @DESCRIPTION:
# The current build directory. In global scope, it is supposed to
# contain an initial build directory; if unset, it defaults to ${S}.
#
# In functions run by lua_foreach_impl(), the BUILD_DIR is locally
# set to an implementation-specific build directory. That path is
# created through appending a hyphen and the implementation name
# to the final component of the initial BUILD_DIR.
#
# Example value:
# @CODE
# ${WORKDIR}/foo-1.3-lua5-1
# @CODE
# @ECLASS-VARIABLE: LUA_DEPS
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# This is an eclass-generated Lua dependency string for all
# implementations listed in LUA_COMPAT.
#
# Example use:
# @CODE
# RDEPEND="${LUA_DEPS}
# dev-foo/mydep"
# DEPEND="${RDEPEND}"
# @CODE
#
# Example value:
# @CODE
# lua_targets_lua5-1? ( dev-lang/lua:5.1 )
# lua_targets_lua5-2? ( dev-lang/lua:5.2 )
# @CODE
# @ECLASS-VARIABLE: LUA_REQUIRED_USE
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# This is an eclass-generated required-use expression which ensures at
# least one Lua implementation has been enabled.
#
# This expression should be utilized in an ebuild by including it in
# REQUIRED_USE, optionally behind a use flag.
#
# Example use:
# @CODE
# REQUIRED_USE="lua? ( ${LUA_REQUIRED_USE} )"
# @CODE
#
# Example value:
# @CODE
# || ( lua_targets_lua5-1 lua_targets_lua5-2 )
# @CODE
# @ECLASS-VARIABLE: LUA_USEDEP
# @OUTPUT_VARIABLE
# @DESCRIPTION:
# This is an eclass-generated USE-dependency string which can be used to
# depend on another Lua package being built for the same Lua
# implementations.
#
# Example use:
# @CODE
# RDEPEND="dev-lua/foo[${LUA_USEDEP}]"
# @CODE
#
# Example value:
# @CODE
# lua_targets_lua5-1(-)?,lua_targets_lua5-2(-)?
# @CODE
if [[ ! ${_LUA_R0} ]]; then
# @FUNCTION: _lua_validate_useflags
# @INTERNAL
# @DESCRIPTION:
# Enforce the proper setting of LUA_TARGETS, if LUA_COMPAT_OVERRIDE
# is not in effect. If it is, just warn that the flags will be ignored.
_lua_validate_useflags() {
debug-print-function ${FUNCNAME} "${@}"
if [[ ${LUA_COMPAT_OVERRIDE} ]]; then
if [[ ! ${_LUA_COMPAT_OVERRIDE_WARNED} ]]; then
ewarn "WARNING: LUA_COMPAT_OVERRIDE in effect. The following Lua"
ewarn "implementations will be enabled:"
ewarn
ewarn " ${LUA_COMPAT_OVERRIDE}"
ewarn
ewarn "Dependencies won't be satisfied, and LUA_TARGETS will be ignored."
_LUA_COMPAT_OVERRIDE_WARNED=1
fi
# we do not use flags with LCO
return
fi
local i
for i in "${_LUA_SUPPORTED_IMPLS[@]}"; do
use "lua_targets_${i}" && return 0
done
eerror "No Lua implementation selected for the build. Please add one"
eerror "of the following values to your LUA_TARGETS"
eerror "(in make.conf or package.use):"
eerror
eerror "${LUA_COMPAT[@]}"
echo
die "No supported Lua implementation in LUA_TARGETS."
}
# @FUNCTION: _lua_obtain_impls
# @INTERNAL
# @DESCRIPTION:
# Set up the enabled implementation list.
_lua_obtain_impls() {
_lua_validate_useflags
if [[ ${LUA_COMPAT_OVERRIDE} ]]; then
MULTIBUILD_VARIANTS=( ${LUA_COMPAT_OVERRIDE} )
return
fi
MULTIBUILD_VARIANTS=()
local impl
for impl in "${_LUA_SUPPORTED_IMPLS[@]}"; do
has "${impl}" "${LUA_COMPAT[@]}" && \
use "lua_targets_${impl}" && MULTIBUILD_VARIANTS+=( "${impl}" )
done
}
# @FUNCTION: _lua_multibuild_wrapper
# @USAGE: <command> [<args>...]
# @INTERNAL
# @DESCRIPTION:
# Initialize the environment for the Lua implementation selected
# for multibuild.
_lua_multibuild_wrapper() {
debug-print-function ${FUNCNAME} "${@}"
local -x ELUA LUA
_lua_export "${MULTIBUILD_VARIANT}" ELUA LUA
local -x PATH=${PATH} PKG_CONFIG_PATH=${PKG_CONFIG_PATH}
_lua_wrapper_setup
"${@}"
}
# @FUNCTION: lua_copy_sources
# @DESCRIPTION:
# Create a single copy of the package sources for each enabled Lua
# implementation.
#
# The sources are always copied from the initial BUILD_DIR (or S if unset)
# to an implementation-specific build directory matching the BUILD_DIR used by
# lua_foreach_impl().
lua_copy_sources() {
debug-print-function ${FUNCNAME} "${@}"
local MULTIBUILD_VARIANTS
_lua_obtain_impls
multibuild_copy_sources
}
# @FUNCTION: lua_foreach_impl
# @USAGE: <command> [<args>...]
# @DESCRIPTION:
# Run the given command for each of the enabled Lua implementations.
# If additional parameters are passed, they will be passed through
# to the command.
#
# The function will return 0 status if all invocations succeed.
# Otherwise, the return code from first failing invocation will
# be returned.
#
# For each command being run, ELUA, LUA and BUILD_DIR are set
# locally, and the former two are exported to the command environment.
lua_foreach_impl() {
debug-print-function ${FUNCNAME} "${@}"
local MULTIBUILD_VARIANTS
_lua_obtain_impls
multibuild_foreach_variant _lua_multibuild_wrapper "${@}"
}
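# Example combining lua_copy_sources and lua_foreach_impl (an illustrative
# sketch; my_configure is a hypothetical helper and the --with-lua switch
# depends on the package's own build system):
# @CODE
# my_configure() {
# cd "${BUILD_DIR}" || die
# econf --with-lua="${ELUA}"
# }
#
# src_configure() {
# lua_copy_sources
# lua_foreach_impl my_configure
# }
# @CODE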
_LUA_R0=1
fi
# @FUNCTION: _lua_set_globals
# @INTERNAL
# @DESCRIPTION:
# Sets all the global output variables provided by this eclass.
# This function must be called once, in global scope.
_lua_set_globals() {
local deps i LUA_PKG_DEP
_lua_set_impls
for i in "${_LUA_SUPPORTED_IMPLS[@]}"; do
_lua_export "${i}" LUA_PKG_DEP
deps+="lua_targets_${i}? ( ${LUA_PKG_DEP} ) "
done
local flags=( "${_LUA_SUPPORTED_IMPLS[@]/#/lua_targets_}" )
local optflags=${flags[@]/%/(-)?}
local requse="|| ( ${flags[*]} )"
local usedep=${optflags// /,}
if [[ ${LUA_DEPS+1} ]]; then
# IUSE is magical, so we can't really check it
# (but we verify LUA_COMPAT already)
if [[ ${LUA_DEPS} != "${deps}" ]]; then
eerror "LUA_DEPS have changed between inherits (LUA_REQ_USE?)!"
eerror "Before: ${LUA_DEPS}"
eerror "Now : ${deps}"
die "LUA_DEPS integrity check failed"
fi
# these two are a formality -- they depend on LUA_COMPAT only
if [[ ${LUA_REQUIRED_USE} != ${requse} ]]; then
eerror "LUA_REQUIRED_USE have changed between inherits!"
eerror "Before: ${LUA_REQUIRED_USE}"
eerror "Now : ${requse}"
die "LUA_REQUIRED_USE integrity check failed"
fi
if [[ ${LUA_USEDEP} != "${usedep}" ]]; then
eerror "LUA_USEDEP have changed between inherits!"
eerror "Before: ${LUA_USEDEP}"
eerror "Now : ${usedep}"
die "LUA_USEDEP integrity check failed"
fi
else
IUSE=${flags[*]}
LUA_DEPS=${deps}
LUA_REQUIRED_USE=${requse}
LUA_USEDEP=${usedep}
readonly LUA_DEPS LUA_REQUIRED_USE
fi
}
_lua_set_globals
unset -f _lua_set_globals

61
eclass/mate-desktop.org.eclass Normal file
View File

@ -0,0 +1,61 @@
# Copyright 1999-2017 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mate-desktop.org.eclass
# @MAINTAINER:
# mate@gentoo.org
# @AUTHOR:
# Authors: NP-Hardass <NP-Hardass@gentoo.org> based upon the gnome.org eclass.
# @SUPPORTED_EAPIS: 6
# @BLURB: Helper eclass for mate-desktop.org hosted archives
# @DESCRIPTION:
# Provide a default SRC_URI and EGIT_REPO_URI for MATE packages as well as
# exporting some useful values like the MATE_BRANCH
# EAPIs < 6 are banned.
case "${EAPI:-0}" in
6) ;;
*) die "EAPI=${EAPI:-0} is not supported" ;;
esac
if [[ ${PV} == 9999 ]]; then
inherit git-r3
fi
inherit versionator
# @ECLASS-VARIABLE: MATE_TARBALL_SUFFIX
# @INTERNAL
# @DESCRIPTION:
# All projects hosted on mate-desktop.org provide tarballs as tar.xz.
# Undefined in live ebuilds.
[[ ${PV} != 9999 ]] && : ${MATE_TARBALL_SUFFIX:="xz"}
# @ECLASS-VARIABLE: MATE_DESKTOP_ORG_PN
# @DESCRIPTION:
# Name of the package as hosted on mate-desktop.org.
# Leave unset if package name matches PN.
: ${MATE_DESKTOP_ORG_PN:=$PN}
# @ECLASS-VARIABLE: MATE_DESKTOP_ORG_PV
# @DESCRIPTION:
# Package version string as listed on mate-desktop.org.
# Leave unset if package version string matches PV.
: ${MATE_DESKTOP_ORG_PV:=$PV}
# @ECLASS-VARIABLE: MATE_BRANCH
# @DESCRIPTION:
# The major and minor components of the version number, unless live.
# In a live ebuild, this will be set to '9999'.
: ${MATE_BRANCH:=$(get_version_component_range 1-2)}
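# Example of overriding the defaults above (an illustrative sketch; the
# package name and version shown are hypothetical):
# @CODE
# MATE_DESKTOP_ORG_PN="mate-foo"
# MATE_DESKTOP_ORG_PV="1.24.1"
# inherit mate-desktop.org
# @CODE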
# Set SRC_URI or EGIT_REPO_URI based on whether live
if [[ ${PV} == 9999 ]]; then
EGIT_REPO_URI="https://github.com/mate-desktop/${MATE_DESKTOP_ORG_PN}.git"
SRC_URI=""
else
SRC_URI="https://pub.mate-desktop.org/releases/${MATE_BRANCH}/${MATE_DESKTOP_ORG_PN}-${MATE_DESKTOP_ORG_PV}.tar.${MATE_TARBALL_SUFFIX}"
fi
# Set HOMEPAGE for all ebuilds
HOMEPAGE="https://mate-desktop.org"

162
eclass/mate.eclass Normal file
View File

@ -0,0 +1,162 @@
# Copyright 1999-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mate.eclass
# @MAINTAINER:
# mate@gentoo.org
# @AUTHOR:
# Authors: NP-Hardass <NP-Hardass@gentoo.org> based upon the gnome2
# and autotools-utils eclasses
# @SUPPORTED_EAPIS: 6
# @BLURB: Provides phases for MATE based packages.
# @DESCRIPTION:
# Exports portage base functions used by ebuilds written for packages using the
# MATE framework. Occasionally acts as a wrapper to gnome2 because
# MATE is a GNOME fork. For additional functions, see gnome2-utils.eclass.
# Check EAPI only
case "${EAPI:-0}" in
6) ;;
*) die "EAPI=${EAPI:-0} is not supported" ;;
esac
# Inherit happens below after declaration of GNOME2_LA_PUNT
# @ECLASS-VARIABLE: MATE_LA_PUNT
# @DESCRIPTION:
# Available values for MATE_LA_PUNT:
# - "no": will not clean any .la files
# - "yes": will run prune_libtool_files --modules
# - If it is not set, it will run prune_libtool_files
# MATE_LA_PUNT is a stub to GNOME2_LA_PUNT
MATE_LA_PUNT=${MATE_LA_PUNT:-""}
GNOME2_LA_PUNT="${MATE_LA_PUNT}"
inherit gnome2 autotools mate-desktop.org
case "${EAPI:-0}" in
6) EXPORT_FUNCTIONS src_prepare src_configure src_install pkg_preinst pkg_postinst pkg_postrm ;;
*) die "EAPI=${EAPI:-0} is not supported" ;;
esac
# Autotools requires our MATE m4 files
DEPEND=">=mate-base/mate-common-${MATE_BRANCH}"
# @FUNCTION: mate_py_cond_func_wrap
# @DESCRIPTION:
# Wraps a function for conditional python use, to run for each
# python implementation in the build directory.
# This function should only be used if the ebuild also inherits the
# python-r1 eclass
mate_py_cond_func_wrap() {
if [[ ! ${_PYTHON_R1} ]]; then
die "This function requires the inheritence of the python-r1 eclass"
fi
if use python; then
python_foreach_impl run_in_build_dir "$@"
else
"$@"
fi
}
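# Example (an illustrative sketch; assumes the ebuild also inherits python-r1,
# has a 'python' USE flag and uses per-implementation build directories):
# @CODE
# src_test() {
# mate_py_cond_func_wrap emake check
# }
# @CODE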
# @ECLASS-VARIABLE: MATE_FORCE_AUTORECONF
# @DESCRIPTION:
# Available values for MATE_FORCE_AUTORECONF:
# - true: will always run eautoreconf
# - false: will rely on automatic detection
# - If it is not set, it will default to false
: ${MATE_FORCE_AUTORECONF:="false"}
# @FUNCTION: ematedocize
# @DESCRIPTION:
# A wrapper around mate-doc-common
ematedocize() {
ebegin "Running mate-doc-common --copy"
mate-doc-common --copy || die
eend $?
}
# @FUNCTION: want_mate_doc
# @DESCRIPTION:
# Returns true/false based on whether eautoreconf should call
# ematedocize
want_mate_doc() {
grep -q USE_COMMON_DOC_BUILD autogen.sh
}
# @FUNCTION: mate_src_prepare
# @DESCRIPTION:
# Call gnome2_src_prepare to handle environment setup and patching, then
# call eautoreconf if necessary
mate_src_prepare() {
debug-print-function ${FUNCNAME} "$@"
local force_autoreconf=${MATE_FORCE_AUTORECONF}
[[ ${PV} == 9999 ]] && force_autoreconf="true"
gen_chksum() {
find '(' -name 'Makefile.am' \
-o -name 'configure.ac' \
-o -name 'configure.in' ')' \
-exec cksum {} + | sort -k2
}
local chksum=$(gen_chksum)
gnome2_src_prepare "$@"
if [[ "${force_autoreconf}" == "true" ]] || [[ ${chksum} != $(gen_chksum) ]]; then
want_mate_doc && ematedocize
AT_NOELIBTOOLIZE="yes" eautoreconf # gnome2_src_prepare calls elibtoolize
fi
}
# @FUNCTION: mate_src_configure
# @DESCRIPTION:
# MATE specific configure handling
# Stub to gnome2_src_configure()
mate_src_configure() {
local mateconf=()
# Pass --disable-static whenever possible
if ! in_iuse static-libs || ! use static-libs; then
if grep -q "enable-static" "${ECONF_SOURCE:-.}"/configure; then
mateconf+=( --disable-static )
fi
fi
gnome2_src_configure "${mateconf[@]}" "$@"
}
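# Example of passing extra arguments through to econf (an illustrative sketch;
# the 'introspection' USE flag and configure switch are hypothetical):
# @CODE
# src_configure() {
# mate_src_configure $(use_enable introspection)
# }
# @CODE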
# @FUNCTION: mate_src_install
# @DESCRIPTION:
# MATE specific install. Stub to gnome2_src_install
mate_src_install() {
gnome2_src_install "$@"
}
# @FUNCTION: mate_pkg_preinst
# @DESCRIPTION:
# Finds Icons, GConf and GSettings schemas for later handling in pkg_postinst
# Stub to gnome2_pkg_preinst
mate_pkg_preinst() {
gnome2_pkg_preinst "$@"
}
# @FUNCTION: mate_pkg_postinst
# @DESCRIPTION:
# Handle scrollkeeper, GConf, GSettings, Icons, desktop and mime
# database updates.
# Stub to gnome2_pkg_postinst
mate_pkg_postinst() {
gnome2_pkg_postinst "$@"
}
# @FUNCTION: mate_pkg_postrm
# @DESCRIPTION:
# Handle scrollkeeper, GSettings, Icons, desktop and mime database updates.
# Stub to gnome2_pkg_postrm
mate_pkg_postrm() {
gnome2_pkg_postrm "$@"
}

203
eclass/mercurial.eclass Normal file
View File

@ -0,0 +1,203 @@
# Copyright 1999-2019 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: mercurial.eclass
# @MAINTAINER:
# Christoph Junghans <junghans@gentoo.org>
# @AUTHOR:
# Next gen author: Krzysztof Pawlik <nelchael@gentoo.org>
# Original author: Aron Griffis <agriffis@gentoo.org>
# @BLURB: This eclass provides generic mercurial fetching functions
# @DESCRIPTION:
# This eclass provides generic mercurial fetching functions. To fetch sources
# from a Mercurial repository, just set EHG_REPO_URI to the correct repository
# URI. If you need to share a single repository between several ebuilds, set
# EHG_PROJECT to the same project name in all of them.
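# Example (an illustrative sketch; the repository URI and branch name are
# hypothetical):
# @CODE
# inherit mercurial
#
# EHG_REPO_URI="https://example.org/hg/foo"
# EHG_REVISION="stable"
# @CODE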
inherit eutils
EXPORT_FUNCTIONS src_unpack
PROPERTIES+=" live"
DEPEND="dev-vcs/mercurial"
# @ECLASS-VARIABLE: EHG_REPO_URI
# @DESCRIPTION:
# Mercurial repository URI.
# @ECLASS-VARIABLE: EHG_REVISION
# @DESCRIPTION:
# Create a working directory for the specified revision; defaults to 'default'.
#
# EHG_REVISION is passed as the value of the --updaterev parameter, so it can be
# more than just a revision; please consult `hg help revisions' for more details.
: ${EHG_REVISION:="default"}
# @ECLASS-VARIABLE: EHG_STORE_DIR
# @DESCRIPTION:
# Mercurial sources store directory. Users may override this in /etc/portage/make.conf
[[ -z "${EHG_STORE_DIR}" ]] && EHG_STORE_DIR="${PORTAGE_ACTUAL_DISTDIR:-${DISTDIR}}/hg-src"
# @ECLASS-VARIABLE: EHG_PROJECT
# @DESCRIPTION:
# Project name.
#
# This variable defaults to ${PN}, but can be changed to allow repository sharing
# between several ebuilds.
[[ -z "${EHG_PROJECT}" ]] && EHG_PROJECT="${PN}"
# @ECLASS-VARIABLE: EHG_CHECKOUT_DIR
# @DESCRIPTION:
# The directory to check the hg sources out to.
#
# Defaults to ${S}.
# @ECLASS-VARIABLE: EHG_QUIET
# @DESCRIPTION:
# Suppress some extra noise from mercurial, set it to 'ON' to be quiet.
: ${EHG_QUIET:="OFF"}
[[ "${EHG_QUIET}" == "ON" ]] && EHG_QUIET_CMD_OPT="--quiet"
# @ECLASS-VARIABLE: EHG_CONFIG
# @DESCRIPTION:
# Extra config option to hand to hg clone/pull
# @ECLASS-VARIABLE: EHG_CLONE_CMD
# @DESCRIPTION:
# Command used to perform initial repository clone.
[[ -z "${EHG_CLONE_CMD}" ]] && EHG_CLONE_CMD="hg clone ${EHG_CONFIG:+--config ${EHG_CONFIG}} ${EHG_QUIET_CMD_OPT} --pull --noupdate"
# @ECLASS-VARIABLE: EHG_PULL_CMD
# @DESCRIPTION:
# Command used to update repository.
[[ -z "${EHG_PULL_CMD}" ]] && EHG_PULL_CMD="hg pull ${EHG_CONFIG:+--config ${EHG_CONFIG}} ${EHG_QUIET_CMD_OPT}"
# @ECLASS-VARIABLE: EHG_OFFLINE
# @DESCRIPTION:
# Set this variable to a non-empty value to disable the automatic updating of
# a mercurial source tree. This is intended to be set outside the ebuild by
# users.
EHG_OFFLINE="${EHG_OFFLINE:-${EVCS_OFFLINE}}"
# @FUNCTION: mercurial_fetch
# @USAGE: [repository_uri] [module] [sourcedir]
# @DESCRIPTION:
# Clone or update repository.
#
# If the repository URI is not passed, it defaults to EHG_REPO_URI; if module is
# empty, it defaults to the basename of EHG_REPO_URI; sourcedir defaults to
# EHG_CHECKOUT_DIR, which in turn defaults to S.
mercurial_fetch() {
debug-print-function ${FUNCNAME} "${@}"
has "${EAPI:-0}" 0 1 2 && ! use prefix && EPREFIX=
EHG_REPO_URI=${1-${EHG_REPO_URI}}
[[ -z "${EHG_REPO_URI}" ]] && die "EHG_REPO_URI is empty"
local module="${2-$(basename "${EHG_REPO_URI}")}"
local sourcedir="${3:-${EHG_CHECKOUT_DIR:-${S}}}"
# Should be set but blank to prevent using $HOME/.hgrc
export HGRCPATH=
# Check ${EHG_STORE_DIR} directory:
addwrite "$(dirname "${EHG_STORE_DIR}")" || die "addwrite failed"
if [[ ! -d "${EHG_STORE_DIR}" ]]; then
mkdir -p "${EHG_STORE_DIR}" || die "failed to create ${EHG_STORE_DIR}"
chmod -f g+rw "${EHG_STORE_DIR}" || \
die "failed to chown ${EHG_STORE_DIR}"
fi
# Create project directory:
mkdir -p "${EHG_STORE_DIR}/${EHG_PROJECT}" || \
die "failed to create ${EHG_STORE_DIR}/${EHG_PROJECT}"
chmod -f g+rw "${EHG_STORE_DIR}/${EHG_PROJECT}" || \
echo "Warning: failed to chmod g+rw ${EHG_PROJECT}"
pushd "${EHG_STORE_DIR}/${EHG_PROJECT}" > /dev/null || \
die "failed to cd to ${EHG_STORE_DIR}/${EHG_PROJECT}"
# Clone/update repository:
if [[ ! -d "${module}" ]]; then
einfo "Cloning ${EHG_REPO_URI} to ${EHG_STORE_DIR}/${EHG_PROJECT}/${module}"
${EHG_CLONE_CMD} "${EHG_REPO_URI}" "${module}" || {
rm -rf "${module}"
die "failed to clone ${EHG_REPO_URI}"
}
elif [[ -z "${EHG_OFFLINE}" ]]; then
einfo "Updating ${EHG_STORE_DIR}/${EHG_PROJECT}/${module} from ${EHG_REPO_URI}"
pushd "${module}" > /dev/null || die "failed to cd to ${module}"
${EHG_PULL_CMD} "${EHG_REPO_URI}" || die "update failed"
popd > /dev/null || die
fi
popd > /dev/null || die
# Checkout working copy:
einfo "Creating working directory in ${sourcedir} (target revision: ${EHG_REVISION})"
mkdir -p "${sourcedir}" || die "failed to create ${sourcedir}"
hg clone \
${EHG_QUIET_CMD_OPT} \
--updaterev="${EHG_REVISION}" \
${EHG_CONFIG:+--config ${EHG_CONFIG}} \
"${EHG_STORE_DIR}/${EHG_PROJECT}/${module}" \
"${sourcedir}" || die "hg clone failed"
# An exact revision helps a lot for testing purposes, so have some output...
# id num branch
# fd6e32d61721 6276 default
local HG_REVDATA=($(hg identify -b -i "${sourcedir}"))
export HG_REV_ID=${HG_REVDATA[0]}
local HG_REV_BRANCH=${HG_REVDATA[1]}
einfo "Work directory: ${sourcedir} global id: ${HG_REV_ID} (was ${EHG_REVISION} branch: ${HG_REV_BRANCH}"
}
# @FUNCTION: mercurial_bootstrap
# @INTERNAL
# @DESCRIPTION:
# Internal function that runs bootstrap command on unpacked source.
mercurial_bootstrap() {
debug-print-function ${FUNCNAME} "$@"
# @ECLASS-VARIABLE: EHG_BOOTSTRAP
# @DESCRIPTION:
# Command to be executed after checkout and clone of the specified
# repository.
if [[ ${EHG_BOOTSTRAP} ]]; then
pushd "${S}" > /dev/null
einfo "Starting bootstrap"
if [[ -f ${EHG_BOOTSTRAP} ]]; then
# we have file in the repo which we should execute
debug-print "${FUNCNAME}: bootstraping with file \"${EHG_BOOTSTRAP}\""
if [[ -x ${EHG_BOOTSTRAP} ]]; then
eval "./${EHG_BOOTSTRAP}" \
|| die "${FUNCNAME}: bootstrap script failed"
else
eerror "\"${EHG_BOOTSTRAP}\" is not executable."
eerror "Report upstream, or bug ebuild maintainer to remove bootstrap command."
die "\"${EHG_BOOTSTRAP}\" is not executable"
fi
else
# we execute some system command
debug-print "${FUNCNAME}: bootstrapping with commands \"${EHG_BOOTSTRAP}\""
eval "${EHG_BOOTSTRAP}" \
|| die "${FUNCNAME}: bootstrap commands failed"
fi
einfo "Bootstrap finished"
popd > /dev/null || die
fi
}
# @FUNCTION: mercurial_src_unpack
# @DESCRIPTION:
# The mercurial src_unpack function, which will be exported.
mercurial_src_unpack() {
debug-print-function ${FUNCNAME} "$@"
mercurial_fetch
mercurial_bootstrap
}

415
eclass/meson.eclass Normal file
View File

@ -0,0 +1,415 @@
# Copyright 2017-2020 Gentoo Authors
# Distributed under the terms of the GNU General Public License v2
# @ECLASS: meson.eclass
# @MAINTAINER:
# William Hubbs <williamh@gentoo.org>
# Mike Gilbert <floppym@gentoo.org>
# @SUPPORTED_EAPIS: 6 7
# @BLURB: common ebuild functions for meson-based packages
# @DESCRIPTION:
# This eclass contains the default phase functions for packages which
# use the meson build system.
#
# @EXAMPLE:
# Typical ebuild using meson.eclass:
#
# @CODE
# EAPI=6
#
# inherit meson
#
# ...
#
# src_configure() {
# local emesonargs=(
# $(meson_use qt4)
# $(meson_feature threads)
# $(meson_use bindist official_branding)
# )
# meson_src_configure
# }
#
# ...
#
# @CODE
case ${EAPI:-0} in
6|7) ;;
*) die "EAPI=${EAPI} is not supported" ;;
esac
if [[ -z ${_MESON_ECLASS} ]]; then
inherit multiprocessing ninja-utils python-utils-r1 toolchain-funcs
if [[ ${EAPI} == 6 ]]; then
inherit eapi7-ver
fi
fi
EXPORT_FUNCTIONS src_configure src_compile src_test src_install
if [[ -z ${_MESON_ECLASS} ]]; then
_MESON_ECLASS=1
MESON_DEPEND=">=dev-util/meson-0.54.0
>=dev-util/ninja-1.8.2
dev-util/meson-format-array
"
if [[ ${EAPI:-0} == 6 ]]; then
DEPEND=${MESON_DEPEND}
else
BDEPEND=${MESON_DEPEND}
fi
# @ECLASS-VARIABLE: BUILD_DIR
# @DEFAULT_UNSET
# @DESCRIPTION:
# Build directory, location where all generated files should be placed.
# If this isn't set, it defaults to ${WORKDIR}/${P}-build.
# @ECLASS-VARIABLE: EMESON_SOURCE
# @DEFAULT_UNSET
# @DESCRIPTION:
# The location of the source files for the project; this is the source
# directory to pass to meson.
# If this isn't set, it defaults to ${S}.
# @VARIABLE: emesonargs
# @DEFAULT_UNSET
# @DESCRIPTION:
# Optional meson arguments as Bash array; this should be defined before
# calling meson_src_configure.
# @VARIABLE: emesontestargs
# @DEFAULT_UNSET
# @DESCRIPTION:
# Optional meson test arguments as Bash array; this should be defined before
# calling meson_src_test.
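# Example (an illustrative sketch; --timeout-multiplier is a standard
# 'meson test' option):
# @CODE
# src_test() {
# local emesontestargs=( --timeout-multiplier 2 )
# meson_src_test
# }
# @CODE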
# @VARIABLE: MYMESONARGS
# @DEFAULT_UNSET
# @DESCRIPTION:
# User-controlled environment variable containing arguments to be passed to
# meson in meson_src_configure.
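# Example (set in the calling environment; -Db_lto is a standard meson base
# option, dev-foo/bar is a hypothetical package):
# @CODE
# MYMESONARGS="-Db_lto=true" emerge -1v dev-foo/bar
# @CODE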
# @FUNCTION: _meson_env_array
# @INTERNAL
# @DESCRIPTION:
# Parses the command line flags and converts them into an array suitable for
# use in a cross file.
#
# Input: --single-quote=\' --double-quote=\" --dollar=\$ --backtick=\`
# --backslash=\\ --full-word-double="Hello World"
# --full-word-single='Hello World'
# --full-word-backslash=Hello\ World
# --simple --unicode-8=© --unicode-16=𐐷 --unicode-32=𐤅
#
# Output: ['--single-quote=\'', '--double-quote="', '--dollar=$',
# '--backtick=`', '--backslash=\\', '--full-word-double=Hello World',
# '--full-word-single=Hello World',
# '--full-word-backslash=Hello World', '--simple', '--unicode-8=©',
# '--unicode-16=𐐷', '--unicode-32=𐤅']
#
_meson_env_array() {
meson-format-array "$@"
}
# @FUNCTION: _meson_get_machine_info
# @USAGE: <tuple>
# @RETURN: system/cpu_family/cpu variables
# @INTERNAL
# @DESCRIPTION:
# Translate toolchain tuple into machine values for meson.
_meson_get_machine_info() {
local tuple=$1
# system roughly corresponds to uname -s (lowercase)
case ${tuple} in
*-aix*) system=aix ;;
*-cygwin*) system=cygwin ;;
*-darwin*) system=darwin ;;
*-freebsd*) system=freebsd ;;
*-linux*) system=linux ;;
mingw*|*-mingw*) system=windows ;;
*-solaris*) system=sunos ;;
esac
cpu_family=$(tc-arch "${tuple}")
case ${cpu_family} in
amd64) cpu_family=x86_64 ;;
arm64) cpu_family=aarch64 ;;
esac
# This may require adjustment based on CFLAGS
cpu=${tuple%%-*}
}
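# For example, a CHOST of aarch64-unknown-linux-gnu (assuming tc-arch reports
# arm64 for that tuple) yields system=linux, cpu_family=aarch64 and cpu=aarch64.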
# @FUNCTION: _meson_create_cross_file
# @RETURN: path to cross file
# @INTERNAL
# @DESCRIPTION:
# Creates a cross file. meson uses this to define settings for
# cross-compilers. This function is called from meson_src_configure.
_meson_create_cross_file() {
local system cpu_family cpu
_meson_get_machine_info "${CHOST}"
local fn=${T}/meson.${CHOST}.${ABI}.ini
cat > "${fn}" <<-EOF
[binaries]
ar = $(_meson_env_array "$(tc-getAR)")
c = $(_meson_env_array "$(tc-getCC)")
cpp = $(_meson_env_array "$(tc-getCXX)")
fortran = $(_meson_env_array "$(tc-getFC)")
llvm-config = '$(tc-getPROG LLVM_CONFIG llvm-config)'
nm = $(_meson_env_array "$(tc-getNM)")
objc = $(_meson_env_array "$(tc-getPROG OBJC cc)")
objcpp = $(_meson_env_array "$(tc-getPROG OBJCXX c++)")
pkgconfig = '$(tc-getPKG_CONFIG)'
strip = $(_meson_env_array "$(tc-getSTRIP)")
windres = $(_meson_env_array "$(tc-getRC)")
[properties]
c_args = $(_meson_env_array "${CFLAGS} ${CPPFLAGS}")
c_link_args = $(_meson_env_array "${CFLAGS} ${LDFLAGS}")
cpp_args = $(_meson_env_array "${CXXFLAGS} ${CPPFLAGS}")
cpp_link_args = $(_meson_env_array "${CXXFLAGS} ${LDFLAGS}")
fortran_args = $(_meson_env_array "${FCFLAGS}")
fortran_link_args = $(_meson_env_array "${FCFLAGS} ${LDFLAGS}")
objc_args = $(_meson_env_array "${OBJCFLAGS} ${CPPFLAGS}")
objc_link_args = $(_meson_env_array "${OBJCFLAGS} ${LDFLAGS}")
objcpp_args = $(_meson_env_array "${OBJCXXFLAGS} ${CPPFLAGS}")
objcpp_link_args = $(_meson_env_array "${OBJCXXFLAGS} ${LDFLAGS}")
needs_exe_wrapper = true
sys_root = '${SYSROOT}'
pkg_config_libdir = '${PKG_CONFIG_LIBDIR:-${EPREFIX}/usr/$(get_libdir)/pkgconfig}'
[host_machine]
system = '${system}'
cpu_family = '${cpu_family}'
cpu = '${cpu}'
endian = '$(tc-endian "${CHOST}")'
EOF
echo "${fn}"
}
# @FUNCTION: _meson_create_native_file
# @RETURN: path to native file
# @INTERNAL
# @DESCRIPTION:
# Creates a native file. meson uses this to define settings for
# native compilers. This function is called from meson_src_configure.
_meson_create_native_file() {
local system cpu_family cpu
_meson_get_machine_info "${CBUILD}"
local fn=${T}/meson.${CBUILD}.${ABI}.ini
cat > "${fn}" <<-EOF
[binaries]
ar = $(_meson_env_array "$(tc-getBUILD_AR)")
c = $(_meson_env_array "$(tc-getBUILD_CC)")
cpp = $(_meson_env_array "$(tc-getBUILD_CXX)")
fortran = $(_meson_env_array "$(tc-getBUILD_PROG FC gfortran)")
llvm-config = '$(tc-getBUILD_PROG LLVM_CONFIG llvm-config)'
nm = $(_meson_env_array "$(tc-getBUILD_NM)")
objc = $(_meson_env_array "$(tc-getBUILD_PROG OBJC cc)")
objcpp = $(_meson_env_array "$(tc-getBUILD_PROG OBJCXX c++)")
pkgconfig = '$(tc-getBUILD_PKG_CONFIG)'
strip = $(_meson_env_array "$(tc-getBUILD_STRIP)")
windres = $(_meson_env_array "$(tc-getBUILD_PROG RC windres)")
[properties]
c_args = $(_meson_env_array "${BUILD_CFLAGS} ${BUILD_CPPFLAGS}")
c_link_args = $(_meson_env_array "${BUILD_CFLAGS} ${BUILD_LDFLAGS}")
cpp_args = $(_meson_env_array "${BUILD_CXXFLAGS} ${BUILD_CPPFLAGS}")
cpp_link_args = $(_meson_env_array "${BUILD_CXXFLAGS} ${BUILD_LDFLAGS}")
fortran_args = $(_meson_env_array "${BUILD_FCFLAGS}")
fortran_link_args = $(_meson_env_array "${BUILD_FCFLAGS} ${BUILD_LDFLAGS}")
objc_args = $(_meson_env_array "${BUILD_OBJCFLAGS} ${BUILD_CPPFLAGS}")
objc_link_args = $(_meson_env_array "${BUILD_OBJCFLAGS} ${BUILD_LDFLAGS}")
objcpp_args = $(_meson_env_array "${BUILD_OBJCXXFLAGS} ${BUILD_CPPFLAGS}")
objcpp_link_args = $(_meson_env_array "${BUILD_OBJCXXFLAGS} ${BUILD_LDFLAGS}")
needs_exe_wrapper = false
pkg_config_libdir = '${BUILD_PKG_CONFIG_LIBDIR:-${EPREFIX}/usr/$(get_libdir)/pkgconfig}'
[build_machine]
system = '${system}'
cpu_family = '${cpu_family}'
cpu = '${cpu}'
endian = '$(tc-endian "${CBUILD}")'
EOF
echo "${fn}"
}
# @FUNCTION: meson_use
# @USAGE: <USE flag> [option name]
# @DESCRIPTION:
# Given a USE flag and meson project option, outputs a string like:
#
# -Doption=true
# -Doption=false
#
# If the project option is unspecified, it defaults to the USE flag.
meson_use() {
usex "$1" "-D${2-$1}=true" "-D${2-$1}=false"
}
# @FUNCTION: meson_feature
# @USAGE: <USE flag> [option name]
# @DESCRIPTION:
# Given a USE flag and meson project option, outputs a string like:
#
# -Doption=enabled
# -Doption=disabled
#
# If the project option is unspecified, it defaults to the USE flag.
meson_feature() {
usex "$1" "-D${2-$1}=enabled" "-D${2-$1}=disabled"
}
# @FUNCTION: meson_src_configure
# @USAGE: [extra meson arguments]
# @DESCRIPTION:
# This is the meson_src_configure function.
meson_src_configure() {
debug-print-function ${FUNCNAME} "$@"
local BUILD_CFLAGS=${BUILD_CFLAGS}
local BUILD_CPPFLAGS=${BUILD_CPPFLAGS}
local BUILD_CXXFLAGS=${BUILD_CXXFLAGS}
local BUILD_FCFLAGS=${BUILD_FCFLAGS}
local BUILD_OBJCFLAGS=${BUILD_OBJCFLAGS}
local BUILD_OBJCXXFLAGS=${BUILD_OBJCXXFLAGS}
local BUILD_LDFLAGS=${BUILD_LDFLAGS}
local BUILD_PKG_CONFIG_LIBDIR=${BUILD_PKG_CONFIG_LIBDIR}
local BUILD_PKG_CONFIG_PATH=${BUILD_PKG_CONFIG_PATH}
if tc-is-cross-compiler; then
: ${BUILD_CFLAGS:=-O1 -pipe}
: ${BUILD_CXXFLAGS:=-O1 -pipe}
: ${BUILD_FCFLAGS:=-O1 -pipe}
: ${BUILD_OBJCFLAGS:=-O1 -pipe}
: ${BUILD_OBJCXXFLAGS:=-O1 -pipe}
else
: ${BUILD_CFLAGS:=${CFLAGS}}
: ${BUILD_CPPFLAGS:=${CPPFLAGS}}
: ${BUILD_CXXFLAGS:=${CXXFLAGS}}
: ${BUILD_FCFLAGS:=${FCFLAGS}}
: ${BUILD_LDFLAGS:=${LDFLAGS}}
: ${BUILD_OBJCFLAGS:=${OBJCFLAGS}}
: ${BUILD_OBJCXXFLAGS:=${OBJCXXFLAGS}}
: ${BUILD_PKG_CONFIG_LIBDIR:=${PKG_CONFIG_LIBDIR}}
: ${BUILD_PKG_CONFIG_PATH:=${PKG_CONFIG_PATH}}
fi
local mesonargs=(
meson setup
--buildtype plain
--libdir "$(get_libdir)"
--localstatedir "${EPREFIX}/var/lib"
--prefix "${EPREFIX}/usr"
--sysconfdir "${EPREFIX}/etc"
--wrap-mode nodownload
--build.pkg-config-path "${BUILD_PKG_CONFIG_PATH}${BUILD_PKG_CONFIG_PATH:+:}${EPREFIX}/usr/share/pkgconfig"
--pkg-config-path "${PKG_CONFIG_PATH}${PKG_CONFIG_PATH:+:}${EPREFIX}/usr/share/pkgconfig"
--native-file "$(_meson_create_native_file)"
)
if tc-is-cross-compiler; then
mesonargs+=( --cross-file "$(_meson_create_cross_file)" )
fi
BUILD_DIR="${BUILD_DIR:-${WORKDIR}/${P}-build}"
# Handle quoted whitespace
eval "local -a MYMESONARGS=( ${MYMESONARGS} )"
mesonargs+=(
# Arguments from ebuild
"${emesonargs[@]}"
# Arguments passed to this function
"$@"
# Arguments from user
"${MYMESONARGS[@]}"
# Source directory
"${EMESON_SOURCE:-${S}}"
# Build directory
"${BUILD_DIR}"
)
# Used by symbolextractor.py
# https://bugs.gentoo.org/717720
tc-export NM
tc-getPROG READELF readelf >/dev/null
# https://bugs.gentoo.org/625396
python_export_utf8_locale
# https://bugs.gentoo.org/721786
local -x BOOST_INCLUDEDIR="${BOOST_INCLUDEDIR-${EPREFIX}/usr/include}"
local -x BOOST_LIBRARYDIR="${BOOST_LIBRARYDIR-${EPREFIX}/usr/$(get_libdir)}"
(
export -n {C,CPP,CXX,F,OBJC,OBJCXX,LD}FLAGS PKG_CONFIG_{LIBDIR,PATH}
echo "${mesonargs[@]}" >&2
"${mesonargs[@]}"
) || die
}
# @FUNCTION: meson_src_compile
# @USAGE: [extra ninja arguments]
# @DESCRIPTION:
# This is the meson_src_compile function.
meson_src_compile() {
debug-print-function ${FUNCNAME} "$@"
eninja -C "${BUILD_DIR}" "$@"
}
# @FUNCTION: meson_src_test
# @USAGE: [extra meson test arguments]
# @DESCRIPTION:
# This is the meson_src_test function.
meson_src_test() {
debug-print-function ${FUNCNAME} "$@"
local mesontestargs=(
-C "${BUILD_DIR}"
)
[[ -n ${NINJAOPTS} || -n ${MAKEOPTS} ]] &&
mesontestargs+=(
--num-processes "$(makeopts_jobs ${NINJAOPTS:-${MAKEOPTS}})"
)
# Append additional arguments from ebuild
mesontestargs+=("${emesontestargs[@]}")
set -- meson test "${mesontestargs[@]}" "$@"
echo "$@" >&2
"$@" || die "tests failed"
}
# @FUNCTION: meson_src_install
# @USAGE: [extra ninja install arguments]
# @DESCRIPTION:
# This is the meson_src_install function.
meson_src_install() {
debug-print-function ${FUNCNAME} "$@"
DESTDIR="${D}" eninja -C "${BUILD_DIR}" install "$@"
einstalldocs
}
fi

Some files were not shown because too many files have changed in this diff.