Clone SOURCES repos outside container
This lets us always use speed-up options like --filter, even when we're
building inside a container.

We can also easily start using Sapling for this in the future by implementing
the new SCM methods.

This also helps with some annoying authentication-related issues when running
inside --docker, such as in CI.

Fixes <https://its.cern.ch/jira/browse/O2-2439>.

When this is merged, we can also get rid of the `GIT_*` environment variables
used in CI, and stop installing a particularly new Git version in our
containers (that version was only needed so Git understood the `GIT_*` variables).
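
For context, the "speed-up options" mentioned above are git's partial-clone
flags. A minimal sketch of what clone_speedup_options() in
alibuild_helpers/git.py plausibly returns; the exact flag list is an
assumption, not taken from this diff:

    # Illustrative sketch only; the real list lives in alibuild_helpers/git.py.
    def clone_speedup_options():
        # "--filter=blob:none" makes git defer downloading file contents until
        # they are actually checked out, which speeds up cloning large repos.
        return ["--filter=blob:none"]

Because the clone now always runs on the host, these flags (and the user's
credentials) apply whether or not the build itself happens inside --docker.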
TimoWilken authored and ktf committed Mar 6, 2024
1 parent 54fdf8d commit 75a4397
Showing 9 changed files with 162 additions and 98 deletions.
40 changes: 9 additions & 31 deletions alibuild_helpers/build.py
@@ -13,12 +13,12 @@
from alibuild_helpers.utilities import Hasher
from alibuild_helpers.utilities import yamlDump
from alibuild_helpers.utilities import resolve_tag, resolve_version
from alibuild_helpers.git import git, clone_speedup_options, Git
from alibuild_helpers.git import Git, git
from alibuild_helpers.sl import Sapling
from alibuild_helpers.scm import SCMError
from alibuild_helpers.sync import remote_from_url
import yaml
from alibuild_helpers.workarea import logged_scm, updateReferenceRepoSpec
from alibuild_helpers.workarea import logged_scm, updateReferenceRepoSpec, checkout_sources
from alibuild_helpers.log import ProgressPrint, log_current_package
from glob import glob
from textwrap import dedent
@@ -69,9 +69,7 @@ def update_repo(package, git_prompt):
if exists(os.path.join(specs[package]["source"], ".sl")):
specs[package]["scm"] = Sapling()
updateReferenceRepoSpec(args.referenceSources, package, specs[package],
fetch=args.fetchRepos,
usePartialClone=not args.docker,
allowGitPrompt=git_prompt)
fetch=args.fetchRepos, allowGitPrompt=git_prompt)

# Retrieve git heads
output = logged_scm(specs[package]["scm"], package, args.referenceSources,
@@ -967,10 +965,6 @@ def doBuild(args, parser):
if spec["cachedTarball"] else "No cache tarballs found")

# The actual build script.
referenceStatement = ""
if "reference" in spec:
referenceStatement = "export GIT_REFERENCE=${GIT_REFERENCE_OVERRIDE:-%s}/%s" % (dirname(spec["reference"]), basename(spec["reference"]))

debug("spec = %r", spec)

cmd_raw = ""
@@ -982,22 +976,14 @@ def doBuild(args, parser):
from pkg_resources import resource_string
cmd_raw = resource_string("alibuild_helpers", 'build_template.sh')

source = spec.get("source", "")
# Shorten the commit hash in case it's a real commit hash and not simply
# the tag.
commit_hash = spec["commit_hash"]
if spec["tag"] != spec["commit_hash"]:
commit_hash = spec["commit_hash"][0:10]

# Split the source in two parts, sourceDir and sourceName. This is done so
# that when we use Docker we can replace sourceDir with the correct
# container path, if required. No changes for what concerns the standard
# bash builds, though.
if args.docker:
cachedTarball = re.sub("^" + workDir, "/sw", spec["cachedTarball"])
else:
cachedTarball = spec["cachedTarball"]

if not cachedTarball:
checkout_sources(spec, workDir, args.referenceSources, args.docker)

scriptDir = join(workDir, "SPECS", args.architecture, spec["package"],
spec["version"] + "-" + spec["revision"])

@@ -1011,11 +997,6 @@
"workDir": workDir,
"configDir": abspath(args.configDir),
"incremental_recipe": spec.get("incremental_recipe", ":"),
"sourceDir": (dirname(source) + "/") if source else "",
"sourceName": basename(source) if source else "",
"referenceStatement": referenceStatement,
"gitOptionsStatement": "" if args.docker else
"export GIT_CLONE_SPEEDUP=" + quote(" ".join(clone_speedup_options())),
"requires": " ".join(spec["requires"]),
"build_requires": " ".join(spec["build_requires"]),
"runtime_requires": " ".join(spec["runtime_requires"]),
@@ -1028,14 +1009,14 @@
("BUILD_REQUIRES", " ".join(spec["build_requires"])),
("CACHED_TARBALL", cachedTarball),
("CAN_DELETE", args.aggressiveCleanup and "1" or ""),
("COMMIT_HASH", commit_hash),
# Shorten the commit hash if it's a real commit hash and not simply the tag.
("COMMIT_HASH", spec["tag"] if spec["tag"] == spec["commit_hash"] else spec["commit_hash"][:10]),
("DEPS_HASH", spec.get("deps_hash", "")),
("DEVEL_HASH", spec.get("devel_hash", "")),
("DEVEL_PREFIX", develPrefix),
("BUILD_FAMILY", spec["build_family"]),
("GIT_COMMITTER_NAME", "unknown"),
("GIT_COMMITTER_EMAIL", "unknown"),
("GIT_TAG", spec["tag"]),
("INCREMENTAL_BUILD_HASH", spec.get("incremental_hash", "0")),
("JOBS", str(args.jobs)),
("PKGHASH", spec["hash"]),
@@ -1048,7 +1029,6 @@
("FULL_RUNTIME_REQUIRES", " ".join(spec["full_runtime_requires"])),
("FULL_BUILD_REQUIRES", " ".join(spec["full_build_requires"])),
("FULL_REQUIRES", " ".join(spec["full_requires"])),
("WRITE_REPO", spec.get("write_repo", source)),
]
# Add the extra environment as passed from the command line.
buildEnvironment += [e.partition('=')[::2] for e in args.environment]
@@ -1059,15 +1039,13 @@
build_command = (
"docker run --rm --entrypoint= --user $(id -u):$(id -g) "
"-v {workdir}:/sw -v {scriptDir}/build.sh:/build.sh:ro "
"-e GIT_REFERENCE_OVERRIDE=/mirror -e WORK_DIR_OVERRIDE=/sw "
"{mirrorVolume} {develVolumes} {additionalEnv} {additionalVolumes} "
"{overrideSource} {extraArgs} {image} bash -ex /build.sh"
"-e WORK_DIR_OVERRIDE=/sw {extraArgs} {image} bash -ex /build.sh"
).format(
image=quote(args.dockerImage),
workdir=quote(abspath(args.workDir)),
scriptDir=quote(scriptDir),
extraArgs=" ".join(map(quote, args.docker_extra_args)),
overrideSource="-e SOURCE0_DIR_OVERRIDE=/" if source.startswith("/") else "",
additionalEnv=" ".join(
"-e {}={}".format(var, quote(value)) for var, value in buildEnvironment),
# Used e.g. by O2DPG-sim-tests to find the O2DPG repository.
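
The checkout_sources helper called above is defined in
alibuild_helpers/workarea.py, whose diff is not shown on this page. A minimal
sketch of what a host-side checkout along these lines could look like, built
only from the SCM methods added in this commit and the SOURCES layout the
build template expects; the function body, directory handling and error
handling here are assumptions, not the actual implementation:

    import os

    def checkout_sources(spec, work_dir, reference_sources, containerised):
        # Hypothetical sketch; the real helper lives in workarea.py.
        scm = spec["scm"]
        source = spec.get("source", "")
        if not source:
            return
        # Same layout the build template expects:
        # WORK_DIR/SOURCES/<package>/<version>/<tag or shortened commit hash>.
        commit = spec["tag"] if spec["tag"] == spec["commit_hash"] else spec["commit_hash"][:10]
        sourcedir = os.path.join(work_dir, "SOURCES", spec["package"], spec["version"], commit)
        reference = spec.get("reference")  # bare mirror under reference_sources, if any
        if os.path.isdir(sourcedir):
            # Existing checkout: the tag may be new, so fetch it before checking it out.
            scm.exec(scm.fetchCmd(source, "refs/tags/{0}:refs/tags/{0}".format(spec["tag"])),
                     directory=sourcedir, check=False)
        else:
            scm.exec(scm.cloneSourceCmd(source, sourcedir, reference, usePartialClone=True))
            scm.exec(scm.setWriteUrlCmd(spec.get("write_repo", source)), directory=sourcedir)
        scm.exec(scm.checkoutCmd(spec["tag"]), directory=sourcedir)

Since this runs outside the container, the reference mirror and partial-clone
options behave the same with or without --docker; the containerised build then
simply finds the already-populated SOURCES directory through the /sw bind mount.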
47 changes: 4 additions & 43 deletions alibuild_helpers/build_template.sh
@@ -27,7 +27,6 @@ export PATH=$WORK_DIR/wrapper-scripts:$PATH
# - DEPS_HASH
# - DEVEL_HASH
# - DEVEL_PREFIX
# - GIT_TAG
# - INCREMENTAL_BUILD_HASH
# - JOBS
# - PKGHASH
@@ -36,36 +35,27 @@ export PATH=$WORK_DIR/wrapper-scripts:$PATH
# - PKGVERSION
# - REQUIRES
# - RUNTIME_REQUIRES
# - WRITE_REPO

export PKG_NAME="$PKGNAME"
export PKG_VERSION="$PKGVERSION"
export PKG_BUILDNUM="$PKGREVISION"

export SOURCE0="${SOURCE0_DIR_OVERRIDE:-%(sourceDir)s}%(sourceName)s"
export PKGPATH=${ARCHITECTURE}/${PKGNAME}/${PKGVERSION}-${PKGREVISION}
mkdir -p "$WORK_DIR/BUILD" "$WORK_DIR/SOURCES" "$WORK_DIR/TARS" \
"$WORK_DIR/SPECS" "$WORK_DIR/INSTALLROOT"
export BUILDROOT="$WORK_DIR/BUILD/$PKGHASH"

# In case the repository is local, it means we are in development mode, so we
# install directly in $WORK_DIR/$PKGPATH so that we can do make install
# directly into BUILD/$PKGPATH and have changes being propagated.
if [ "${SOURCE0:0:1}" == "/" ]; then
# If we are in development mode, then install directly in $WORK_DIR/$PKGPATH,
# so that we can do "make install" directly into BUILD/$PKGPATH and have
# changes being propagated.
if [ -n "$DEVEL_HASH" ]; then
export INSTALLROOT="$WORK_DIR/$PKGPATH"
else
export INSTALLROOT="$WORK_DIR/INSTALLROOT/$PKGHASH/$PKGPATH"
fi
export SOURCEDIR="$WORK_DIR/SOURCES/$PKGNAME/$PKGVERSION/$COMMIT_HASH"
export BUILDDIR="$BUILDROOT/$PKGNAME"

SHORT_TAG=${GIT_TAG:0:10}
mkdir -p $(dirname $SOURCEDIR)
if [[ ${COMMIT_HASH} != ${GIT_TAG} && "${SHORT_TAG:-0}" != ${COMMIT_HASH} ]]; then
GIT_TAG_DIR=${GIT_TAG:-0}
GIT_TAG_DIR=${GIT_TAG_DIR//\//_}
ln -snf ${COMMIT_HASH} "$WORK_DIR/SOURCES/$PKGNAME/$PKGVERSION/${GIT_TAG_DIR}"
fi
rm -fr "$WORK_DIR/INSTALLROOT/$PKGHASH"
# We remove the build directory only if we are not in incremental mode.
if [[ "$INCREMENTAL_BUILD_HASH" == 0 ]] && ! rm -rf "$BUILDROOT"; then
@@ -114,35 +104,6 @@ if [[ $DEVEL_PREFIX ]]; then
ln -snf $PKGHASH $WORK_DIR/BUILD/$PKGNAME-latest-$DEVEL_PREFIX
fi

# Reference statements
%(referenceStatement)s
%(gitOptionsStatement)s

if [ -z "$CACHED_TARBALL" ]; then
case "$SOURCE0" in
'') # SOURCE0 is empty, so just create an empty SOURCEDIR.
mkdir -p "$SOURCEDIR" ;;
/*) # SOURCE0 is an absolute path, so just make a symlink there.
ln -snf "$SOURCE0" "$SOURCEDIR" ;;
*) # SOURCE0 is a relative path or URL, so clone/checkout the git repo from there.
if cd "$SOURCEDIR" 2>/dev/null; then
# Folder is already present, but check that it is the right tag
if ! git checkout -f "$GIT_TAG"; then
# If we can't find the tag, it might be new. Fetch tags and try again.
git fetch -f "$SOURCE0" "refs/tags/$GIT_TAG:refs/tags/$GIT_TAG"
git checkout -f "$GIT_TAG"
fi
else
# In case there is a stale link / file, for whatever reason.
rm -rf "$SOURCEDIR"
git clone -n $GIT_CLONE_SPEEDUP ${GIT_REFERENCE:+--reference "$GIT_REFERENCE"} "$SOURCE0" "$SOURCEDIR"
cd "$SOURCEDIR"
git remote set-url --push origin "$WRITE_REPO"
git checkout -f "$GIT_TAG"
fi ;;
esac
fi

cd "$BUILDDIR"

# Actual build script, as defined in the recipe
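
With the in-container clone removed, the template simply expects $SOURCEDIR to
be populated in advance. Its path follows the same naming rule as the
COMMIT_HASH variable set in build.py above; a small hypothetical helper to
illustrate the rule (not part of the codebase):

    import os

    def expected_sourcedir(work_dir, pkgname, pkgversion, tag, commit_hash):
        # COMMIT_HASH is the tag itself when the recipe's "tag" is already a
        # commit hash, otherwise the hash shortened to 10 characters.
        commit = tag if tag == commit_hash else commit_hash[:10]
        return os.path.join(work_dir, "SOURCES", pkgname, pkgversion, commit)

    # e.g. expected_sourcedir("sw", "zlib", "v1.2.13", "v1.2.13",
    #                         "8a2bc3d4e5f60718293a4b5c6d7e8f9012345678")
    #      -> "sw/SOURCES/zlib/v1.2.13/8a2bc3d4e5"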
30 changes: 27 additions & 3 deletions alibuild_helpers/git.py
@@ -18,34 +18,58 @@ def clone_speedup_options():

class Git(SCM):
name = "Git"

def checkedOutCommitName(self, directory):
return git(("rev-parse", "HEAD"), directory)

def branchOrRef(self, directory):
out = git(("rev-parse", "--abbrev-ref", "HEAD"), directory=directory)
if out == "HEAD":
out = git(("rev-parse", "HEAD"), directory)[:10]
return out

def exec(self, *args, **kwargs):
return git(*args, **kwargs)

def parseRefs(self, output):
return {
git_ref: git_hash for git_hash, sep, git_ref
in (line.partition("\t") for line in output.splitlines()) if sep
}

def listRefsCmd(self, repository):
return ["ls-remote", "--heads", "--tags", repository]
def cloneCmd(self, source, referenceRepo, usePartialClone):

def cloneReferenceCmd(self, source, referenceRepo, usePartialClone):
cmd = ["clone", "--bare", source, referenceRepo]
if usePartialClone:
cmd.extend(clone_speedup_options())
return cmd
def fetchCmd(self, source):
return ["fetch", "-f", "--tags", source, "+refs/heads/*:refs/heads/*"]

def cloneSourceCmd(self, source, destination, referenceRepo, usePartialClone):
cmd = ["clone", "-n", source, destination]
if referenceRepo:
cmd.extend(["--reference", referenceRepo])
if usePartialClone:
cmd.extend(clone_speedup_options())
return cmd

def checkoutCmd(self, ref):
return ["checkout", "-f", ref]

def fetchCmd(self, source, *refs):
return ["fetch", "-f", source, *refs]

def setWriteUrlCmd(self, url):
return ["remote", "set-url", "--push", "origin", url]

def diffCmd(self, directory):
return "cd %s && git diff -r HEAD && git status --porcelain" % directory

def checkUntracked(self, line):
return line.startswith("?? ")


def git(args, directory=".", check=True, prompt=True):
debug("Executing git %s (in directory %s)", " ".join(args), directory)
# We can't use git --git-dir=%s/.git or git -C %s here as the former requires
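
To make the split between the bare mirror clone (cloneReferenceCmd) and the
new working-tree clone (cloneSourceCmd) concrete, here is a usage sketch of
the argument lists these methods build; the repository URL and paths are made
up, and the exact partial-clone flags come from clone_speedup_options():

    from alibuild_helpers.git import Git

    scm = Git()
    # Hypothetical inputs, for illustration only.
    source = "https://github.com/alisw/zlib"
    mirror = "sw/MIRROR/zlib"
    sourcedir = "sw/SOURCES/zlib/v1.2.13/8a2bc3d4e5"

    scm.cloneReferenceCmd(source, mirror, usePartialClone=True)
    # -> ["clone", "--bare", source, mirror] plus the partial-clone flags
    scm.cloneSourceCmd(source, sourcedir, mirror, usePartialClone=True)
    # -> ["clone", "-n", source, sourcedir, "--reference", mirror] plus the flags
    scm.fetchCmd(source, "refs/tags/v1.2.13:refs/tags/v1.2.13")
    # -> ["fetch", "-f", source, "refs/tags/v1.2.13:refs/tags/v1.2.13"]
    scm.checkoutCmd("v1.2.13")
    # -> ["checkout", "-f", "v1.2.13"]
    scm.setWriteUrlCmd("ssh://git@github.com/alisw/zlib")
    # -> ["remote", "set-url", "--push", "origin", "ssh://git@github.com/alisw/zlib"]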
1 change: 1 addition & 0 deletions alibuild_helpers/init.py
@@ -66,6 +66,7 @@ def doInit(args):

for p in pkgs:
spec = specs.get(p["name"])
spec["is_devel_pkg"] = False
spec["scm"] = Git()
dieOnError(spec is None, "cannot find recipe for package %s" % p["name"])
dest = join(args.develPrefix, spec["package"])
10 changes: 9 additions & 1 deletion alibuild_helpers/scm.py
@@ -15,7 +15,15 @@ def parseRefs(self, output):
raise NotImplementedError
def exec(self, *args, **kwargs):
raise NotImplementedError
def cloneCmd(self, spec, referenceRepo, usePartialClone):
def checkoutCmd(self, tag):
raise NotImplementedError
def fetchCmd(self, remote, *refs):
raise NotImplementedError
def cloneReferenceCmd(self, spec, referenceRepo, usePartialClone):
raise NotImplementedError
def cloneSourceCmd(self, spec, referenceRepo, usePartialClone):
raise NotImplementedError
def setWriteUrlCmd(self, url):
raise NotImplementedError
def diffCmd(self, directory):
raise NotImplementedError
9 changes: 9 additions & 0 deletions alibuild_helpers/sl.py
@@ -6,6 +6,7 @@
SL_COMMAND_TIMEOUT_SEC = 120
"""How many seconds to let any sl command execute before being terminated."""


# Sapling is a novel SCM by Meta (i.e. Facebook) that is fully compatible with
# git, but has a different command line interface. Among the reasons why it's
# worth supporting it is the ability to handle unnamed branches, the ability to
@@ -14,26 +15,34 @@
# command line from each commit of a branch.
class Sapling(SCM):
name = "Sapling"

def checkedOutCommitName(self, directory):
return sapling(("whereami", ), directory)

def branchOrRef(self, directory):
# Format is <hash>[+] <branch>
identity = sapling(("identify", ), directory)
return identity.split(" ")[-1]

def exec(self, *args, **kwargs):
return sapling(*args, **kwargs)

def parseRefs(self, output):
return {
sl_ref: sl_hash for sl_ref, sep, sl_hash
in (line.partition("\t") for line in output.splitlines()) if sep
}

def listRefsCmd(self, repository):
return ["bookmark", "--list", "--remote", "-R", repository]

def diffCmd(self, directory):
return "cd %s && sl diff && sl status" % directory

def checkUntracked(self, line):
return line.startswith("? ")


def sapling(args, directory=".", check=True, prompt=True):
debug("Executing sl %s (in directory %s)", " ".join(args), directory)
# We can't use git --git-dir=%s/.git or git -C %s here as the former requires
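
build.py selects the SCM per package by looking for a .sl directory in the
source checkout (see the update_repo hunk above). A small sketch of that
dispatch, with detect_scm being a hypothetical helper name rather than a
function from the codebase:

    import os
    from alibuild_helpers.git import Git
    from alibuild_helpers.sl import Sapling

    def detect_scm(source_dir):
        # Mirrors the check in build.py: a ".sl" directory means the working
        # copy is managed by Sapling; Git stays the default otherwise.
        if os.path.exists(os.path.join(source_dir, ".sl")):
            return Sapling()
        return Git()

Once Sapling also implements the new cloneSourceCmd, checkoutCmd, fetchCmd and
setWriteUrlCmd methods, the same host-side checkout path can serve both SCMs,
which is the follow-up the commit message alludes to.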
