Compare commits

No commits in common. "dependencies" and "main" have entirely different histories.

dependencies...main

@@ -1,19 +0,0 @@
-# vim:sts=4:sw=4
-FROM python:3.9-slim-bullseye
-
-RUN mkdir /build && \
-    apt update && apt install -y git && \
-    rm -rf /var/lib/apt/lists/* && \
-    cd /root && \
-    git clone https://git.libreitalia.org/libreitalia/loaih.git && \
-    cd loaih && git checkout dependencies && \
-    python3 -m venv venv && \
-    . venv/bin/activate && \
-    pip install build && \
-    python3 -m build && \
-    pip install dist/loaih*.whl && \
-    deactivate && \
-    ln -sf /root/loaih/venv/bin/loaih /usr/local/bin/loaih
-WORKDIR /build
-ENTRYPOINT [ "/usr/local/bin/loaih" ]
-CMD [ "--help" ]

@@ -16,8 +16,8 @@ if [[ ${retval} -ne 0 ]]; then
 fi
 pip3 uninstall -y loaih
 # build the actual toolkit
-python3 -m build -w
-pip3 install dist/loaih*.whl; rv=$?
+python3 setup.py bdist_wheel
+pip3 install dist/*.whl; rv=$?
 if [[ -d venv ]]; then
     deactivate
 fi

@@ -18,7 +18,6 @@ dependencies = [
     "lxml",
     "pyyaml",
     "requests",
-    "lddcollect"
 ]
 classifiers = [
     "Development Status :: 5 - Production/Stable",

@@ -134,7 +134,7 @@ class Solver():
         retval.query = self.text
         retval.branch = self.branch
         retval.version = self.version
-        if retval.branch != 'daily' and retval.branch != 'prerelease':
+        if retval.branch != 'daily':
             retval.urls['x86_64'] = self.baseurl + 'x86_64/'
 
         try:

@@ -234,8 +234,6 @@ class NamedSolver(Solver):
             fullversion: str = str(archived_versions[-1])
             self.baseurl = ARCHIVE + fullversion + 'deb/'
             self.version = fullversion.rstrip('/')
-        if self.branch == 'prerelease':
-            self.baseurl = PRERELEASE
 
         return self.version
 

@@ -36,12 +36,10 @@ class Build():
     LANGBASIC = [ 'en-GB' ]
     ARCHSTD = [ 'x86', 'x86_64' ]
 
-    def __init__(self, version: loaih.Version, arch, debug=False):
-        self.debug = debug
+    def __init__(self, version: loaih.Version, arch):
         self.version = version
         self.tidy_folder = True
         self.verbose = True
-        self.check_dependencies = False
         self.arch = arch
         self.short_version = str.join('.', self.version.version.split('.')[0:2])
         self.branch_version = self.version.branch

@@ -90,27 +88,6 @@ class Build():
     def calculate(self):
         """Calculate exclusions and other variables."""
 
-        if self.verbose:
-            print("--- Preliminary Phase ---")
-
-        if isinstance(shutil.which('apt'), str):
-            # APT is found in path. We assume we can find dependencies.
-            self.check_dependencies = True
-
-
-            if self.verbose:
-                print("Updating system packages cache.")
-            # Updating package cache
-            subprocess.run(['sudo', 'apt', 'update'], check=True, stdout=subprocess.DEVNULL)
-            if self.verbose:
-                print("Ensuring apt-file is installed and updated.")
-            # Updating apt-file cache
-            subprocess.run(['sudo', 'apt', 'install', 'apt-file', '-y'], check=True, stdout=subprocess.DEVNULL)
-            subprocess.run(['sudo', 'apt-file', 'update'], check=True, stdout=subprocess.DEVNULL)
-        else:
-            print("CAUTION: your system seems not to include a working version of apt.\nThis will cause the AppImage to leverage system libraries when run.")
-            self.check_dependencies = False
-
         if self.verbose:
             print("--- Calculate Phase ---")
 

@@ -197,7 +174,7 @@ class Build():
             print(f"Found requested AppImage: {self.appimagefilename}.")
 
 
-    def download(self, compact=False):
+    def download(self):
         """Downloads the contents of the URL as it was a folder."""
 
         if self.verbose:

@@ -217,20 +194,12 @@ class Build():
             self.found = True
 
         # Identifying downloads
-        self.tarballs = [ x for x in loaih.match_xpath(self.url, "//td/a/text()") if x.endswith('tar.gz') and 'deb' in x and self.version.version in x ]
+        self.tarballs = [ x for x in loaih.match_xpath(self.url, "//td/a/text()") if x.endswith('tar.gz') and 'deb' in x ]
 
-        self.download_tarballs = []
-
-        # Issue #5: manage a limited number of downloads and not the full set.
-        if compact:
-            self.download_tarballs = self.__select_tarballs__()
-        else:
-            self.download_tarballs = self.tarballs
-
         # Create and change directory to the download location
         os.makedirs(self.download_path, exist_ok = True)
         os.chdir(self.download_path)
-        for archive in self.download_tarballs:
+        for archive in self.tarballs:
             # If the archive is already there, do not do anything.
             if os.path.exists(archive):
                 continue

@@ -248,12 +217,12 @@ class Build():
     def build(self):
         """Building all the versions."""
 
-        if self.found:
-            return
-
         if self.verbose:
             print("--- Building Phase ---")
 
+        if self.found:
+            return
+
         # Preparation tasks
         self.appnamedir = os.path.join(self.builddir, self.appname)
         os.makedirs(self.appnamedir, exist_ok=True)

@@ -267,13 +236,6 @@ class Build():
 
         # Build the requested version.
         self.__unpackbuild__()
-        self.__prepare_contents__()
-        if self.check_dependencies:
-            if self.verbose:
-                print("Searching for dependent libraries, it might take a while.")
-
-            self.__missing_dependencies__()
-        self.__finalize_build__()
 
 
     def checksums(self):

@@ -473,46 +435,40 @@ class Build():
             file.write(chunk)
         return filename
 
-    def __select_tarballs__(self):
-        retval = [ self.tarballs[0] ]
+    def __unpackbuild__(self):
+        # We start by filtering out tarballs from the list
+        buildtarballs = [ self.tarballs[0] ]
 
         # Let's process standard languages and append results to the
         # buildtarball
         if self.language == 'basic':
             if self.offline_help:
-                retval.extend([ x for x in self.tarballs if 'pack_en-GB' in x ])
+                buildtarballs.extend([ x for x in self.tarballs if 'pack_en-GB' in x ])
             else:
-                retval.extend([ x for x in self.tarballs if 'langpack_en-GB' in x])
+                buildtarballs.extend([ x for x in self.tarballs if 'langpack_en-GB' in x])
         elif self.language == 'standard':
             for lang in Build.LANGSTD:
                 if self.offline_help:
-                    retval.extend([ x for x in self.tarballs if 'pack_' + lang in x ])
+                    buildtarballs.extend([ x for x in self.tarballs if 'pack_' + lang in x ])
                 else:
-                    retval.extend([ x for x in self.tarballs if 'langpack_' + lang in x ])
+                    buildtarballs.extend([ x for x in self.tarballs if 'langpack_' + lang in x ])
         elif self.language == 'full':
             if self.offline_help:
                 # We need also all help. Let's replace buildtarball with the
                 # whole bunch
-                retval = self.tarballs
+                buildtarballs = self.tarballs
             else:
-                retval.extend([ x for x in self.tarballs if 'langpack' in x ])
+                buildtarballs.extend([ x for x in self.tarballs if 'langpack' in x ])
         else:
             # Looping for each language in self.language
             for lang in self.language.split(","):
                 if self.offline_help:
-                    retval.extend([ x for x in self.tarballs
+                    buildtarballs.extend([ x for x in self.tarballs
                         if 'pack' + lang in x ])
                 else:
-                    retval.extend([ x for x in self.tarballs
+                    buildtarballs.extend([ x for x in self.tarballs
                         if 'langpack' + lang in x ])
 
-        return retval
-
-
-    def __unpackbuild__(self):
-        # We start by filtering out tarballs from the list
-        buildtarballs = self.__select_tarballs__()
-
         os.chdir(self.appnamedir)
 
         # Unpacking the tarballs

@@ -523,17 +479,13 @@ class Build():
             subprocess.run(shlex.split(
                 f"tar xzf {self.download_path}/{archive}"), check=True)
 
-    def __prepare_contents__(self):
         # create appimagedir
         if self.verbose:
             print("---- Preparing the build ----")
-        self.appimagedir = os.path.join(self.appnamedir, self.appname + '.AppDir')
+        self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir')
         os.makedirs(self.appimagedir, exist_ok = True)
 
         # At this point, let's decompress the deb packages
-        if self.verbose:
-            print("Unpacking main archives")
-
         subprocess.run(shlex.split(
             r"find .. -iname '*.deb' -exec dpkg -x {} . \;"
         ), cwd=self.appimagedir, check=True)

@@ -546,9 +498,6 @@ class Build():
         ), cwd=self.appimagedir, check=True)
 
         # Changing desktop file
-        if self.verbose:
-            print("Preparing .desktop file.")
-
         subprocess.run(shlex.split(
             r"find . -iname startcenter.desktop -exec cp {} . \;"
         ), cwd=self.appimagedir, check=True)

@@ -558,21 +507,11 @@ class Build():
             r"startcenter.desktop"
         ), cwd=self.appimagedir, check=False)
 
-        if self.verbose:
-            print("Preparing icon file.")
         subprocess.run(shlex.split(
             r"find . -name '*startcenter.png' -path '*hicolor*48x48*' " +
             r"-exec cp {} . \;"
         ), cwd=self.appimagedir, check=True)
 
-        # Finding path to main executable
-        cmd = subprocess.run(shlex.split(
-            r"find -iname soffice.bin -print"
-        ), cwd=self.appimagedir, check = True, capture_output=True)
-        self.main_executable = os.path.abspath(os.path.join(
-            self.appimagedir,
-            cmd.stdout.strip().decode('utf-8')))
-
         # Find the name of the binary called in the desktop file.
         binaryname = ''
         with open(

@@ -584,164 +523,63 @@ class Build():
                     binaryname = line.split('=')[-1].split(' ')[0]
                     # Exit at the first match
                     break
 
         #binary_exec = subprocess.run(shlex.split(r"awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'"), cwd=self.appimagedir, text=True, encoding='utf-8')
         #binaryname = binary_exec.stdout.strip("\n")
 
-        # Creating a soft link so the executable in the desktop file is present
         bindir=os.path.join(self.appimagedir, 'usr', 'bin')
         os.makedirs(bindir, exist_ok = True)
         subprocess.run(shlex.split(
-            r"find ../../opt -iname soffice.bin -path '*program*' " +
+            r"find ../../opt -iname soffice -path '*program*' " +
             r"-exec ln -sf {} ./%s \;" % binaryname
         ), cwd=bindir, check=True)
-
-    def __missing_dependencies__(self):
-        """Finds and copy in the appimagedir any missing libraries."""
-        # If the system permits it, we leverage lddcollect
-        # to find the packages that contain .so dependencies in the main build.
-        import lddcollect
-        # We first process the ELF
-        raw = lddcollect.process_elf(self.main_executable, verbose = False, dpkg = True)
-
-        # If all works as expected, we obtain a tuple of:
-        # (debian_packages, all_libraries, files_not_found)
-        (debian_packages, all_libraries, not_found) = raw
-
-        if len(debian_packages) != 0:
-            # Creating temporary folders
-            debs = [ x.split(':')[0] for x in debian_packages ]
-            downloadpath = os.path.abspath(os.path.join(self.builddir, 'dependencies'))
-            os.makedirs(downloadpath)
-
-
-            if self.verbose:
-                print("Downloading missing dependencies, please wait.")
-
-            # Let's try to find and install also other libraries
-            additional = list(dict.fromkeys([ Helpers.lib_to_deb(x) for x in not_found ]))
-            debs.extend(additional)
-
-            # It seems the download command does not download dependencies of
-            # the packages.
-            if self.verbose:
-                print("Constructing the dependency tree.")
-
-            for deb in debian_packages:
-                debs.extend(Helpers.deb_dependencies(deb))
-
-            # Re-cleaning up the dependency tree
-            debs = list(dict.fromkeys(debs))
-
-            # We download the missing dependencies leveraging apt
-            subprocess.run(shlex.split(
-                r"apt download " + " ".join(debs)
-            ), cwd=downloadpath, check=True)
-
-            # then we install them inside a temporary path
-            temporary = os.path.abspath(os.path.join(downloadpath, 'temp'))
-            os.makedirs(temporary)
-            subprocess.run(shlex.split(
-                r"find " + downloadpath + r" -iname \*.deb -exec dpkg -x {} " + temporary + r" \;"
-            ), cwd=self.builddir, check=True)
-
-            # We are finally copying the .so files in the same path as main_executable
-            libdirs = [ 'lib/x86_64-linux-gnu', 'usr/lib/x86_64-linux-gnu' ]
-            for libdir in libdirs:
-                fulllibdir = os.path.abspath(os.path.join(temporary, libdir))
-                subprocess.run(shlex.split(
-                    f"cp -Ra {fulllibdir}/. {os.path.dirname(self.main_executable)}/"
-                ), cwd=temporary, check=True)
-
-        if self.debug:
-            with open(os.path.abspath(os.storage_path, 'dependencies.lst'), 'w', encoding="utf-8") as deplist:
-                deplist.write("\n".join(debs))
-
-    def __finalize_build__(self):
-        if self.verbose:
-            print("Finalizing build...")
-        # Cleaning up AppDir
-        cleanup_dirs = [ 'etc', 'lib', 'lib64', 'usr/lib', 'usr/local' ]
-        for local in cleanup_dirs:
-            shutil.rmtree(os.path.abspath(os.path.join(self.appimagedir, local)), ignore_errors=True)
 
         # Download AppRun from github
         apprunurl = r"https://github.com/AppImage/AppImageKit/releases/"
         apprunurl += f"download/continuous/AppRun-{self.arch}"
         dest = os.path.join(self.appimagedir, 'AppRun')
         self.__download__(apprunurl, dest)
         os.chmod(dest, 0o755)
 
         # Dealing with extra options
         buildopts = []
         if self.sign:
             buildopts.append('--sign')
 
         # adding zsync build if updatable
         if self.updatable:
             buildopts.append(f"-u 'zsync|{self.zsyncfilename}'")
 
         buildopts_str = str.join(' ', buildopts)
 
         # Build the number-specific build
         if self.verbose:
             print("---- Start building ----")
             subprocess.run(shlex.split(
                 f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
-                f"{self.appimagedir}"
+                f"./{self.appname}.AppDir/"
             ), env={ "VERSION": self.appversion }, check=True)
             print("---- End building ----")
         else:
             subprocess.run(shlex.split(
                 f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
-                f"{self.appimagedir}"
+                f"./{self.appname}.AppDir/"
             ), env={ "VERSION": self.appversion }, stdout=subprocess.DEVNULL,
                 stderr=subprocess.DEVNULL, check=True)
 
         if self.verbose:
             print(f"Built AppImage version {self.appversion}")
 
         # Cleanup phase, before new run.
         for deb in glob.glob(self.appnamedir + '/*.deb'):
             os.remove(deb)
         subprocess.run(shlex.split(
             r"find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+"
         ), check=True)
 
         self.built = True
 
     def __del__(self):
         """Destructor"""
-        if not self.debug:
-            # Cleaning up build directory
-            shutil.rmtree(self.builddir)
-
-
-class Helpers:
-
-    @staticmethod
-    def deb_dependencies(package_name):
-        """Returns the array of the dependencies of that package."""
-
-        # First pass: find dependency of that package in raw output
-        pass1 = subprocess.Popen(shlex.split(
-            f"apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends {package_name}"
-        ), stdout=subprocess.PIPE)
-
-        # Second pass: only grep interesting lines.
-        pass2 = subprocess.Popen(shlex.split(
-            r"grep '^\w'"
-        ), stdin=pass1.stdout, stdout=subprocess.PIPE, encoding='utf-8')
-        stdout, stderr = pass2.communicate()
-
-        return stdout.strip().split("\n")
-
-    @staticmethod
-    def lib_to_deb(libraryname):
-        """Uses system tools to identify the missing package."""
-
-        libsearch = subprocess.run(shlex.split(
-            f"sudo apt-file find -lx {libraryname}$"
-        ), check=True, capture_output=True)
-        candidate = [ x for x in libsearch.stdout.decode('utf-8').split('\n') if 'lib' in x ][0]
-        return candidate
+        # Cleaning up build directory
+        shutil.rmtree(self.builddir)

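For readers skimming the hunk above: the dependencies branch (the side removed in this compare) bundles missing shared libraries by mapping the main ELF to Debian packages with lddcollect, downloading those packages with apt, and unpacking them next to the build. The sketch below only condenses that flow; the lddcollect.process_elf call and its return tuple are copied from the removed method, while the function name, arguments, and paths are illustrative and not part of either branch.

# Condensed sketch, assuming a Debian-like system with apt, apt-file and dpkg available.
import os
import shlex
import subprocess

import lddcollect  # the library the dependencies branch declares in pyproject.toml


def collect_library_packages(elf_path, workdir):
    """Resolve, download and unpack the Debian packages that provide the
    shared libraries used by elf_path (names and paths are placeholders)."""
    # Map the ELF's .so dependencies to dpkg packages (call as in the removed code).
    debian_packages, _libraries, _not_found = lddcollect.process_elf(
        elf_path, verbose=False, dpkg=True)

    # Strip the ":arch" suffix and de-duplicate, as the original method does.
    debs = list(dict.fromkeys(pkg.split(':')[0] for pkg in debian_packages))
    if not debs:
        return debs

    # Download the packages with apt and unpack them under workdir/dependencies/temp.
    download_path = os.path.join(workdir, 'dependencies')
    temporary = os.path.join(download_path, 'temp')
    os.makedirs(temporary, exist_ok=True)
    subprocess.run(['apt', 'download', *debs], cwd=download_path, check=True)
    subprocess.run(shlex.split(
        rf"find {download_path} -iname \*.deb -exec dpkg -x {{}} {temporary} \;"
    ), check=True)
    return debs
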
@@ -59,9 +59,8 @@ def getversion(query, jsonout, default_to_current):
 @click.option('--updatable', '-u', is_flag=True, default=False, help="Create an updatable AppImage (compatible with zsync2). Default: not updatable")
 @click.option('--download-path', '-d', default='./downloads', type=str, help="Path to the download folder. Default: ./downloads")
 @click.option('--repo-path', '-r', default='.', type=str, help="Path to the final storage of the AppImage. Default: current directory")
-@click.option('--debug', 'debug', is_flag=True, default=False, help="Activate debug options.")
 @click.argument('query')
-def build(arch, language, offline, portable, updatable, download_path, repo_path, check, checksums, sign, keep, query, debug):
+def build(arch, language, offline, portable, updatable, download_path, repo_path, check, checksums, sign, keep, query):
     """Builds an Appimage with the provided options."""
 
     # Multiple query support

@@ -90,7 +89,6 @@ def build(arch, language, offline, portable, updatable, download_path, repo_path
     for myquery in queries:
         for appbuild in loaih.build.Collection(myquery, arches):
             # Configuration phase
-            appbuild.debug = debug
             appbuild.tidy_folder = False
             appbuild.language = language
             appbuild.offline_help = offline

@@ -106,7 +104,7 @@ def build(arch, language, offline, portable, updatable, download_path, repo_path
             if check:
                 appbuild.check()
 
-            appbuild.download(compact = True)
+            appbuild.download()
             appbuild.build()
             if checksums:
                 appbuild.checksums()

@@ -1,4 +1,4 @@
 #!/usr/bin/env python3
 # encoding: utf-8
 
-version = "1.4.0rc0"
+version = "1.3.3"