Remove old implementation of making bootstrap dir

Waqar Ahmed
2021-04-22 06:19:00 +05:00
committed by Waqar Ahmed
parent e3da709e98
commit 4a8b3d823e
15 changed files with 42 additions and 257 deletions

View File

@@ -2,12 +2,13 @@ import os
import shutil
from scale_build.clean import clean_packages
from scale_build.utils.manifest import get_manifest
from scale_build.utils.paths import BUILDER_DIR, CHROOT_BASEDIR
from scale_build.utils.run import run
from .cache import CacheMixin
from .hash import HashMixin
from .utils import get_apt_preferences, get_manifest
from .utils import get_apt_preferences
class BootstrapDir(CacheMixin, HashMixin):
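
The hunk above shows the new home of the bootstrap logic: a BootstrapDir class combining CacheMixin and HashMixin. Its body is not part of this diff; the sketch below is reconstructed from the call sites later in the commit (context-manager usage, a setup() method, a logger passed to the constructor) and is illustrative only, not the actual scale_build implementation.

# Illustrative sketch only -- reconstructed from call sites in this commit.
from scale_build.utils.paths import CHROOT_BASEDIR

from .cache import CacheMixin
from .hash import HashMixin


class BootstrapDir(CacheMixin, HashMixin):

    def __init__(self, logger):
        self.logger = logger
        self.chroot_basedir = CHROOT_BASEDIR

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Assumed responsibility: unmount and clean the chroot on exit,
        # replacing the removed remove_boostrap_directory() helper.
        ...

    def setup(self):
        # Assumed: debootstrap the chroot, or skip when the squashfs cache
        # is still valid (CacheMixin), then save the cache for future runs.
        ...


class PackageBootstrapDirectory(BootstrapDir):
    pass


class CdromBootstrapDirectory(BootstrapDir):
    pass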

View File

@@ -1,79 +1,8 @@
import json
import os
from scale_build.clean import clean_packages
from scale_build.exceptions import CallError
from scale_build.utils.paths import CACHE_DIR
from .hash import get_all_repo_hash
from .logger import get_logger
from .utils import CACHE_DIR, CHROOT_BASEDIR, get_cache_filename, get_cache_hash_filename, HASH_DIR, run
def create_basehash(cache_type):
# This is to check if apt mirrors have changed
with open(os.path.join(CACHE_DIR, get_cache_hash_filename(cache_type)), 'w') as f:
f.write(get_all_repo_hash())
def check_basechroot_changed():
# This is for checking if we should clean packages
logger = get_logger('package')
base_hash = get_base_hash()
basechroot_hash_path = os.path.join(HASH_DIR, '.basechroot.hash')
if os.path.exists(basechroot_hash_path):
with open(basechroot_hash_path, 'r') as f:
saved_hash = f.read().strip()
if saved_hash != base_hash:
logger.debug('Upstream repository changes detected. Rebuilding all packages...')
clean_packages()
with open(basechroot_hash_path, 'w') as f:
f.write(base_hash)
def save_build_cache(cache_type):
logger = get_logger(cache_type)
logger.debug('Caching CHROOT_BASEDIR for future runs...')
run([
'mksquashfs', CHROOT_BASEDIR, os.path.join(CACHE_DIR, get_cache_filename(cache_type))
], logger=logger, exception=CallError, exception_msg='Failed squashfs')
def remove_basecache(cache_type):
logger = get_logger(cache_type)
logger.debug('Removing base chroot cache for %s', cache_type.name)
for path in map(
lambda p: os.path.join(CACHE_DIR, p), (get_cache_filename(cache_type), get_cache_hash_filename(cache_type))
):
if os.path.exists(path):
os.unlink(path)
def restore_basecache(cache_type, chroot_basedir, logger=None):
run([
'unsquashfs', '-f', '-d', chroot_basedir, os.path.join(CACHE_DIR, get_cache_filename(cache_type))
], exception=CallError, exception_msg='Failed unsquashfs', logger=logger)
def validate_basecache(cache_type):
# No hash file? Lets remove to be safe
logger = get_logger(cache_type)
cache_hash_file = os.path.join(CACHE_DIR, get_cache_hash_filename(cache_type))
invalidated = True
if not os.path.exists(cache_hash_file) or not os.path.exists(
os.path.join(CACHE_DIR, get_cache_filename(cache_type))
):
remove_basecache(cache_type)
else:
with open(cache_hash_file, 'r') as f:
saved_hash = f.read().strip()
if saved_hash != get_all_repo_hash():
logger.debug('Upstream repo changed! Removing squashfs cache to re-create.')
remove_basecache(cache_type)
else:
invalidated = False
return not invalidated
class CacheMixin:
@@ -132,6 +61,4 @@ class CacheMixin:
return self.installed_packages_in_cache != self.get_packages()
def restore_cache(self, chroot_basedir):
self.run([
'unsquashfs', '-f', '-d', chroot_basedir, self.cache_file_path
])
self.run(['unsquashfs', '-f', '-d', chroot_basedir, self.cache_file_path])
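
With the module-level cache helpers above removed, their behaviour moves onto CacheMixin as instance state and methods. Only the simplified restore_cache in this hunk is verbatim from the commit; the other members below (cache_filename, cache_file_path, save_build_cache) are an assumed shape that mirrors the removed helpers.

# Assumed shape of the mixin -- only restore_cache matches the diff verbatim.
import os

from scale_build.utils.paths import CACHE_DIR


class CacheMixin:

    @property
    def cache_filename(self):
        # Counterpart of the removed get_cache_filename(cache_type),
        # e.g. 'basechroot-package.squashfs'.
        raise NotImplementedError

    @property
    def cache_file_path(self):
        return os.path.join(CACHE_DIR, self.cache_filename)

    def save_build_cache(self):
        self.logger.debug('Caching CHROOT_BASEDIR for future runs...')
        self.run(['mksquashfs', self.chroot_basedir, self.cache_file_path])

    def restore_cache(self, chroot_basedir):
        self.run(['unsquashfs', '-f', '-d', chroot_basedir, self.cache_file_path])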

View File

@@ -1,22 +0,0 @@
import os
import shutil
from scale_build.utils.run import run
from scale_build.utils.paths import CHROOT_BASEDIR, TMPFS
def remove_boostrap_directory():
for command in (
['umount', '-f', os.path.join(CHROOT_BASEDIR, 'proc')],
['umount', '-f', os.path.join(CHROOT_BASEDIR, 'sys')],
['umount', '-f', os.path.join(CHROOT_BASEDIR, 'packages')],
['umount', '-f', os.path.join(CHROOT_BASEDIR, 'var/cache/apt')],
['umount', '-f', CHROOT_BASEDIR],
['umount', '-R', '-f', CHROOT_BASEDIR],
['umount', '-R', '-f', TMPFS]
):
run(command, check=False)
for path in (CHROOT_BASEDIR, TMPFS):
shutil.rmtree(path, ignore_errors=True)
os.makedirs(path)

View File

@@ -1,103 +0,0 @@
import os
import shutil
from scale_build.exceptions import CallError
from .cache import check_basechroot_changed, create_basehash, save_build_cache, validate_basecache
from .cleanup import remove_boostrap_directory
from .logger import get_logger
from .utils import (
BootstrapDirectoryType, BUILDER_DIR, CACHE_DIR, CHROOT_BASEDIR, get_apt_preferences, get_manifest, run
)
def make_bootstrapdir(bootstrapdir_type, logger_file=None):
assert bootstrapdir_type in BootstrapDirectoryType
remove_boostrap_directory()
try:
_make_bootstrapdir_impl(bootstrapdir_type, logger_file)
finally:
remove_boostrap_directory()
def _make_bootstrapdir_impl(bootstrapdir_type, logger_file=None):
logger = get_logger(bootstrapdir_type, 'w', logger_file)
run_args = {'logger': logger}
if bootstrapdir_type == BootstrapDirectoryType.CDROM:
deopts = ['--components=main,contrib,nonfree', '--variant=minbase', '--include=systemd-sysv,gnupg']
else:
deopts = []
# Check if we should invalidate the base cache
if validate_basecache(bootstrapdir_type):
logger.debug('Basechroot cache is intact and does not need to be changed')
return
run([
'apt-key', '--keyring', '/etc/apt/trusted.gpg.d/debian-archive-truenas-automatic.gpg', 'add',
os.path.join(BUILDER_DIR, 'keys/truenas.gpg')
], exception=CallError, exception_msg='Failed adding truenas.gpg apt-key', **run_args)
apt_repos = get_manifest()['apt-repos']
run(
['debootstrap'] + deopts + [
'--keyring', '/etc/apt/trusted.gpg.d/debian-archive-truenas-automatic.gpg', 'bullseye',
CHROOT_BASEDIR, apt_repos['url']
], exception=CallError, exception_msg='Failed debootstrap', **run_args
)
create_basehash(bootstrapdir_type)
os.makedirs(os.path.join(CACHE_DIR, 'apt'), exist_ok=True)
run(['mount', 'proc', os.path.join(CHROOT_BASEDIR, 'proc'), '-t', 'proc'], **run_args)
run(['mount', 'sysfs', os.path.join(CHROOT_BASEDIR, 'sys'), '-t', 'sysfs'], **run_args)
run([
'mount', '--bind', os.path.join(CACHE_DIR, 'apt'), os.path.join(CHROOT_BASEDIR, 'var/cache/apt')
], exception=CallError, exception_msg='Failed mount --bind /var/cache/apt', **run_args
)
if bootstrapdir_type == BootstrapDirectoryType.PACKAGE:
# Add extra packages for builds
run([
'chroot', CHROOT_BASEDIR, 'apt', 'install', '-y', 'build-essential', 'dh-make', 'devscripts', 'fakeroot'
], exception=CallError, exception_msg='Failed chroot setup', **run_args)
# Save the correct repo in sources.list
apt_path = os.path.join(CHROOT_BASEDIR, 'etc/apt')
apt_sources_path = os.path.join(apt_path, 'sources.list')
apt_sources = [f'deb {apt_repos["url"]} {apt_repos["distribution"]} {apt_repos["components"]}']
# Set bullseye repo as the priority
with open(os.path.join(apt_path, 'preferences'), 'w') as f:
f.write(get_apt_preferences())
# Add additional repos
for repo in apt_repos['additional']:
logger.debug('Adding additional repo: %r', repo['url'])
shutil.copy(os.path.join(BUILDER_DIR, repo['key']), os.path.join(CHROOT_BASEDIR, 'apt.key'))
run([
'chroot', CHROOT_BASEDIR, 'apt-key', 'add', '/apt.key'
], exception=CallError, exception_msg='Failed adding apt-key', **run_args)
os.unlink(os.path.join(CHROOT_BASEDIR, 'apt.key'))
apt_sources.append(f'deb {repo["url"]} {repo["distribution"]} {repo["component"]}')
# If not building a cd environment
if bootstrapdir_type == BootstrapDirectoryType.PACKAGE:
check_basechroot_changed()
with open(apt_sources_path, 'w') as f:
f.write('\n'.join(apt_sources))
# Update apt
run(['chroot', CHROOT_BASEDIR, 'apt', 'update'], exception=CallError, exception_msg='Failed apt update', **run_args)
# Put our local package up at the top of the food chain
apt_sources.insert(0, 'deb [trusted=yes] file:/packages /')
with open(apt_sources_path, 'w') as f:
f.write('\n'.join(apt_sources))
run(['umount', '-f', os.path.join(CHROOT_BASEDIR, 'var/cache/apt')], **run_args)
run(['umount', '-f', os.path.join(CHROOT_BASEDIR, 'proc')], **run_args)
run(['umount', '-f', os.path.join(CHROOT_BASEDIR, 'sys')], **run_args)
save_build_cache(bootstrapdir_type)
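
The removed make_bootstrapdir() above drove the whole flow from a type enum plus module-level state. The replacement pattern, visible in the updated call sites later in this diff, instantiates one of the new classes and runs it as a context manager:

# Pattern taken from the updated call sites in this commit; the logger
# arguments are the ones used there.
from scale_build.bootstrap.bootstrapdir import PackageBootstrapDirectory
from scale_build.utils.logger import get_logger

# Before (removed): make_bootstrapdir('package')
with PackageBootstrapDirectory(get_logger('package_bootstrap', 'bootstrap_chroot', 'w')) as p:
    p.setup()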

View File

@@ -7,10 +7,11 @@ import requests
import urllib.parse
from scale_build.exceptions import CallError
from scale_build.utils.manifest import get_manifest
from scale_build.utils.run import run
from scale_build.utils.paths import CACHE_DIR, CHROOT_BASEDIR, HASH_DIR
from .utils import get_apt_preferences, get_manifest
from .utils import get_apt_preferences
to_disable = ('requests', 'urllib3')
@@ -23,7 +24,7 @@ INSTALLED_PACKAGES_REGEX = re.compile(r'([^\t]+)\t([^\t]+)\t([\S]+)\n')
def get_repo_hash(repo_url, distribution):
resp = requests.get(urllib.parse.urljoin(repo_url, os.path.join('dists', distribution, 'Release')))
resp = requests.get(urllib.parse.urljoin(repo_url, os.path.join('dists', distribution, 'Release')), timeout=60)
if resp.status_code != 200:
raise CallError(f'Unable to retrieve hash for {repo_url}')
return hashlib.sha256(resp.content).hexdigest()

View File

@@ -1,18 +0,0 @@
from scale_build.utils.logger import get_logger as _get_logger
from .utils import BootstrapDirectoryType
def get_log_file_name(bootstrap_dir_type, logger_file):
if logger_file:
return logger_file
elif bootstrap_dir_type == BootstrapDirectoryType.CDROM:
return 'cdrom-bootstrap.log'
else:
return 'bootstrap_chroot.log'
def get_logger(bootstrap_dir_type, mode='a+', logger_file=None):
return _get_logger(
f'bootstrap_dir_{bootstrap_dir_type.name}', get_log_file_name(bootstrap_dir_type, logger_file), mode
)

View File

@@ -1,18 +1,7 @@
from scale_build.utils.manifest import get_manifest
from scale_build.utils.run import run # noqa
from scale_build.utils.paths import BUILDER_DIR, CACHE_DIR, CHROOT_BASEDIR, HASH_DIR, TMP_DIR, TMPFS # noqa
from scale_build.utils.types import BootstrapDirectoryType # noqa
def get_apt_preferences():
return '\n\n'.join(
'\n'.join(f'{k}: {v}' for k, v in pref.items()) for pref in get_manifest()['apt_preferences']
)
def get_cache_filename(cache_type):
return f'basechroot-{cache_type.name}.squashfs'
def get_cache_hash_filename(cache_type):
return f'{get_cache_filename(cache_type)}.hash'

View File

@@ -1,21 +1,20 @@
import os
import shutil
from scale_build.bootstrap.cache import restore_basecache
from scale_build.utils.run import run
from scale_build.utils.paths import CD_DIR, CHROOT_BASEDIR, PKG_DIR, RELEASE_DIR, TMPFS
from .utils import PACKAGE_PATH
def setup_chroot_basedir(basecache_type, logger=None):
def setup_chroot_basedir(bootstrapdir_obj, logger):
shutil.rmtree(CHROOT_BASEDIR, ignore_errors=True)
os.makedirs(TMPFS, exist_ok=True)
run(
['mount', '-t', 'tmpfs', '-o', 'size=12G', 'tmpfs', TMPFS],
logger=logger
)
restore_basecache(basecache_type, CHROOT_BASEDIR, logger)
bootstrapdir_obj.restore_cache(CHROOT_BASEDIR)
run(['mount', 'proc', os.path.join(CHROOT_BASEDIR, 'proc'), '-t', 'proc'], logger=logger)
run(['mount', 'sysfs', os.path.join(CHROOT_BASEDIR, 'sys'), '-t', 'sysfs'], logger=logger)
os.makedirs(PACKAGE_PATH, exist_ok=True)

View File

@@ -4,17 +4,17 @@ import os
import shutil
from scale_build.config import VERSION
from scale_build.utils.logger import get_logger
from scale_build.utils.manifest import get_manifest
from scale_build.utils.run import run
from scale_build.utils.paths import BUILDER_DIR, CD_DIR, CHROOT_BASEDIR, CONF_GRUB, RELEASE_DIR, TMP_DIR
from .logger import get_logger
from .manifest import UPDATE_FILE
from .utils import run_in_chroot
def install_iso_packages():
installer_logger = get_logger('cdrom-packages')
installer_logger = get_logger('cdrom-packages', 'cdrom-packages.log')
run_in_chroot('apt update', logger=installer_logger)
# echo "/dev/disk/by-label/TRUENAS / iso9660 loop 0 0" > ${CHROOT_BASEDIR}/etc/fstab
@@ -28,7 +28,7 @@ def install_iso_packages():
def make_iso_file():
iso_logger = get_logger('cdrom-iso')
iso_logger = get_logger('cdrom-iso', 'cdrom-iso.log')
for f in glob.glob(os.path.join(RELEASE_DIR, '*.iso*')):
os.unlink(f)
@@ -69,7 +69,7 @@ def make_iso_file():
shutil.copy(os.path.join(CHROOT_BASEDIR, 'initrd.img'), CD_DIR)
shutil.copy(os.path.join(CHROOT_BASEDIR, 'vmlinuz'), CD_DIR)
for f in itertools.chain(
glob.glob(os.path.join(CD_DIR, 'boot/initrd.img-')),
glob.glob(os.path.join(CD_DIR, 'boot/initrd.img-*')),
glob.glob(os.path.join(CD_DIR, 'boot/vmlinuz-*')),
):
os.unlink(f)

View File

@@ -1,3 +1,4 @@
import contextlib
import glob
import itertools
import logging
@@ -7,11 +8,11 @@ import shutil
from scale_build.config import SIGNING_KEY, SIGNING_PASSWORD
from scale_build.exceptions import CallError
from scale_build.utils.logger import get_logger
from scale_build.utils.manifest import get_manifest
from scale_build.utils.run import run
from scale_build.utils.paths import CHROOT_BASEDIR, CONF_SOURCES, RELEASE_DIR, UPDATE_DIR
from .logger import get_logger
from .manifest import build_manifest, build_update_manifest, UPDATE_FILE, UPDATE_FILE_HASH
from .utils import run_in_chroot
@@ -34,7 +35,7 @@ def build_rootfs_image():
#
# This allows us to verify without ever extracting anything to disk
build_logger = get_logger('rootfs-image', 'w')
build_logger = get_logger('rootfs-image', 'rootfs-image.log', 'w')
# Create the inner image
run(
['mksquashfs', CHROOT_BASEDIR, os.path.join(UPDATE_DIR, 'rootfs.squashfs'), '-comp', 'xz'],
@@ -66,7 +67,7 @@ def sign_manifest(signing_key, signing_pass):
def install_rootfs_packages():
rootfs_logger = get_logger('rootfs-packages', 'w')
rootfs_logger = get_logger('rootfs-packages', 'rootfs-packages', 'w')
os.makedirs(os.path.join(CHROOT_BASEDIR, 'etc/dpkg/dpkg.cfg.d'), exist_ok=True)
with open(os.path.join(CHROOT_BASEDIR, 'etc/dpkg/dpkg.cfg.d/force-unsafe-io'), 'w') as f:
f.write('force-unsafe-io')
@@ -112,7 +113,7 @@ def custom_rootfs_setup(rootfs_logger):
)
for unit_file in filter(lambda f: f.endswith('.service'), os.listdir(tmp_systemd)):
with open(os.path.join(tmp_systemd, unit_file), 'a') as f:
f.write(textwrap.dedent('''
f.write(textwrap.dedent('''\
[Install]
WantedBy=multi-user.target
'''))
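
The added backslash after the opening quotes is a small but real fix: without it the triple-quoted string begins with a newline, which textwrap.dedent() leaves in place, so the text appended to each unit file started with a blank line before [Install]. A standalone illustration (not part of the commit):

import textwrap

# Demonstrates the effect of the trailing backslash after the opening quotes.
without = textwrap.dedent('''
    [Install]
    WantedBy=multi-user.target
''')
with_slash = textwrap.dedent('''\
    [Install]
    WantedBy=multi-user.target
''')
print(repr(without))     # '\n[Install]\nWantedBy=multi-user.target\n'
print(repr(with_slash))  # '[Install]\nWantedBy=multi-user.target\n'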
@@ -146,7 +147,7 @@ def clean_rootfs(rootfs_logger):
# So to prevent a bunch of systemd "Failed" messages to be barfed to the console during boot,
# we remove this file because the linux kernel dynamically loads the modules based on whether
# or not you have the actual hardware installed in the system.
if os.path.exists(os.path.join(CHROOT_BASEDIR, 'etc/modprobe.d/nvidia.conf')):
with contextlib.suppress(FileNotFoundError):
os.unlink(os.path.join(CHROOT_BASEDIR, 'etc/modprobe.d/nvidia.conf'))
for path in (

View File

@@ -3,15 +3,15 @@ import glob
import logging
import os
from .bootstrap.configure import make_bootstrapdir
from .bootstrap.bootstrapdir import CdromBootstrapDirectory
from .config import VERSION
from .exceptions import CallError
from .image.bootstrap import (
clean_mounts, setup_chroot_basedir, umount_chroot_basedir, umount_tmpfs_and_clean_chroot_dir
)
from .image.iso import install_iso_packages, make_iso_file
from .image.logger import get_logger
from .image.manifest import UPDATE_FILE
from .utils.logger import get_logger
from .utils.paths import LOG_DIR, RELEASE_DIR
@@ -35,8 +35,11 @@ def build_impl():
raise CallError('Missing rootfs image. Run \'make update\' first.', errno.ENOENT)
logger.debug('Bootstrapping CD chroot [ISO] (%s/cdrom-bootstrap.log)', LOG_DIR)
make_bootstrapdir('cdrom')
setup_chroot_basedir('cdrom', get_logger('cdrom-bootstrap'))
cdrom_bootstrap_obj = CdromBootstrapDirectory(get_logger('cdrom-bootstrap', 'cdrom-bootstrap.log', 'w'))
with cdrom_bootstrap_obj as p:
p.setup()
setup_chroot_basedir(cdrom_bootstrap_obj, cdrom_bootstrap_obj.logger)
logger.debug('Installing packages [ISO] (%s/cdrom-packages.log)', LOG_DIR)
install_iso_packages()

View File

@@ -6,10 +6,11 @@ import threading
from toposort import toposort
from .bootstrap.configure import make_bootstrapdir
from .bootstrap.bootstrapdir import PackageBootstrapDirectory
from .clean import clean_bootstrap_logs
from .config import PARALLEL_BUILD, PKG_DEBUG
from .packages.order import get_to_build_packages
from .utils.logger import get_logger
from .utils.paths import PKG_DIR, PKG_LOG_DIR
from .utils.run import interactive_run, run
@@ -101,7 +102,10 @@ def build_packages():
def _build_packages_impl():
logger.info('Building packages')
logger.debug('Setting up bootstrap directory')
make_bootstrapdir('package')
with PackageBootstrapDirectory(get_logger('package_bootstrap', 'bootstrap_chroot', 'w')) as p:
p.setup()
logger.debug('Successfully setup bootstrap directory')
shutil.rmtree(PKG_LOG_DIR, ignore_errors=True)

View File

@@ -1,6 +1,6 @@
import os
from scale_build.bootstrap.cache import restore_basecache
from scale_build.bootstrap.bootstrapdir import PackageBootstrapDirectory
from scale_build.utils.run import run
@@ -13,4 +13,4 @@ class BootstrapMixin:
['mount', '-t', 'tmpfs', '-o', f'size={self.tmpfs_size}G', 'tmpfs', self.tmpfs_path],
logger=self.logger
)
restore_basecache('package', self.chroot_base_directory, self.logger)
PackageBootstrapDirectory(self.logger).restore_cache(self.chroot_base_directory)

View File

@@ -1,13 +1,13 @@
import logging
import os
from .bootstrap.configure import make_bootstrapdir
from .bootstrap.bootstrapdir import PackageBootstrapDirectory
from .image.bootstrap import (
clean_mounts, setup_chroot_basedir, umount_chroot_basedir, umount_tmpfs_and_clean_chroot_dir
)
from .image.logger import get_logger
from .image.manifest import UPDATE_FILE
from .image.update import install_rootfs_packages, build_rootfs_image
from .utils.logger import get_logger
from .utils.paths import CHROOT_BASEDIR, LOG_DIR, RELEASE_DIR
@@ -28,10 +28,13 @@ def build_update_image_impl():
clean_mounts()
os.makedirs(CHROOT_BASEDIR)
logger.debug('Bootstrapping TrueNAS rootfs [UPDATE] (%s/rootfs-bootstrap.log)', LOG_DIR)
make_bootstrapdir('package', 'rootfs-bootstrap.log')
package_bootstrap_obj = PackageBootstrapDirectory(get_logger('rootfs-bootstrap', 'rootfs-bootstrap.log', 'w'))
with package_bootstrap_obj as p:
p.setup()
logger.debug('Installing TrueNAS rootfs package [UPDATE] (%s/rootfs-package.log)', LOG_DIR)
setup_chroot_basedir('package', get_logger('rootfs-bootstrap'))
setup_chroot_basedir(package_bootstrap_obj, package_bootstrap_obj.logger)
install_rootfs_packages()
umount_chroot_basedir()

View File

@@ -4,7 +4,7 @@ import os
from .paths import LOG_DIR
def get_logger(logger_name, logger_path, mode='a+'):
def get_logger(logger_name, logger_path=None, mode='a+'):
logger = logging.getLogger(logger_name)
logger.propagate = False
logger.setLevel('DEBUG')
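
The shared get_logger() now defaults logger_path to None, so callers can omit the file name now that the per-module logger wrappers have been removed. The rest of the body is not shown in this hunk; the fallback below is an assumption about how a missing path might be handled.

# Sketch of a possible body; the derived fallback file name is an assumption.
import logging
import os

from .paths import LOG_DIR


def get_logger(logger_name, logger_path=None, mode='a+'):
    logger = logging.getLogger(logger_name)
    logger.propagate = False
    logger.setLevel('DEBUG')
    if not logger.handlers:
        # Assumption: derive a file name from the logger name when none is given.
        handler = logging.FileHandler(
            os.path.join(LOG_DIR, logger_path or f'{logger_name}.log'), mode
        )
        logger.addHandler(handler)
    return logger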