Compare commits
3 Commits
Author | SHA1 | Date
---|---|---
 | 8aeabdc0de |
 | 09f21c1f44 |
 | 9f8d9db228 |
@@ -1,6 +1,3 @@
venv
test
build
dist
loaih.egg-info
venv
**/__pycache__
@@ -1,8 +1,5 @@
# vim:sts=4:sw=4
FROM python:3.9-slim-bullseye
FROM python:3.9-slim

RUN mkdir /build && \
    pip install loaih
WORKDIR /build
RUN pip install loaih
ENTRYPOINT [ "/usr/local/bin/loaih" ]
CMD [ "--help" ]
@@ -1,19 +0,0 @@
# vim:sts=4:sw=4
FROM python:3.9-slim-bullseye

RUN mkdir /build && \
    apt update && apt install -y git && \
    rm -rf /var/lib/apt/lists/* && \
    cd /root && \
    git clone https://git.libreitalia.org/libreitalia/loaih.git && \
    cd loaih && git checkout dependencies && \
    python3 -m venv venv && \
    . venv/bin/activate && \
    pip install build && \
    python3 -m build && \
    pip install dist/loaih*.whl && \
    deactivate && \
    ln -sf /root/loaih/venv/bin/loaih /usr/local/bin/loaih
WORKDIR /build
ENTRYPOINT [ "/usr/local/bin/loaih" ]
CMD [ "--help" ]
@@ -11,13 +11,15 @@ if [[ ${retval} -ne 0 ]]; then
# for the sake of consistency, let's make the check_updates.sh script
# executable
chmod +x check_updates.sh

# Updating runtime
if [[ -d venv ]]; then
    source venv/bin/activate
fi
pip3 uninstall -y loaih
# build the actual toolkit
python3 -m build -w
pip3 install dist/loaih*.whl; rv=$?
python3 -m build --sdist
pip3 install dist/*.tar.gz; rv=$?
if [[ -d venv ]]; then
    deactivate
fi
@@ -1,4 +1,3 @@
# vim:sts=4:sw=4
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

@@ -6,51 +5,46 @@ build-backend = "hatchling.build"
[project]
name = "loaih"
dynamic = ["version"]
authors = [
    { name = "Emiliano Vavassori", email = "syntaxerrormmm@gmail.com" },
]
description = "LOAIH - LibreOffice AppImage Helpers, help build a LibreOffice AppImage"
readme = "README.md"
license = "MIT"
requires-python = ">= 3.6"
authors = [
    { name = "Emiliano Vavassori", email = "syntaxerrormmm@libreoffice.org" },
]
dependencies = [
    "click",
    "lddcollect",
    "lxml",
    "python-magic",
    "packaging",
    "pyyaml",
    "requests"
    "requests",
]
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.6",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "Intended Audience :: End Users/Desktop",
    "Intended Audience :: System Administrators",
    "Intended Audience :: Information Technology",
    "License :: OSI Approved :: MIT License",
    "Natural Language :: English",
    "Operating System :: POSIX :: Linux",
    "Topic :: Office/Business",
    "Topic :: Software Development :: Build Tools",
    "Programming Language :: Python :: 3",
    "Topic :: Office/Business :: Office Suites",
    "Topic :: Software Development :: Quality Assurance",
    "Topic :: Software Development :: Testing",
    "Topic :: Software Development :: User Interfaces"
    "Topic :: System :: Software Distribution",
]

[tool.hatch.version]
path = "src/loaih/version.py"

[project.scripts]
loaih = "loaih.script:cli"

[project.urls]
Homepage = "https://git.libreitalia.org/LibreItalia/loaih/"
Homepage = "https://pypi.org/project/loaih/"
Documentation = "https://git.libreitalia.org/libreitalia/loaih/wiki"
Repository = "https://git.libreitalia.org/libreitalia/loaih/"
Issues = "https://git.libreitalia.org/libreitalia/loaih/issues/"

[tool.hatch.version]
path = "src/loaih/version.py"
[publish.index]

[tool.hatch.build.targets.sdist]
include = [
    "src/loaih",
]
[publish.index.repos.pypi]
url = "https://upload.pypi.org/legacy/"

[publish.index.repos.testpypi]
url = "https://test.pypi.org/legacy/"
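The package version stays dynamic here: hatchling reads it from `src/loaih/version.py` at build time. As a small illustration (not part of this diff; needs Python 3.8+), the same string can be read back at runtime from the installed package metadata:

```python
# Hypothetical check, assuming the wheel built from this pyproject is installed.
from importlib.metadata import PackageNotFoundError, version

try:
    print(version("loaih"))          # "loaih" is the [project] name above
except PackageNotFoundError:
    print("loaih is not installed in this environment")
```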
@@ -134,7 +134,7 @@ class Solver():
        retval.query = self.text
        retval.branch = self.branch
        retval.version = self.version
        if retval.branch != 'daily' and retval.branch != 'prerelease':
        if retval.branch != 'daily':
            retval.urls['x86_64'] = self.baseurl + 'x86_64/'

        try:
@@ -234,8 +234,6 @@ class NamedSolver(Solver):
        fullversion: str = str(archived_versions[-1])
        self.baseurl = ARCHIVE + fullversion + 'deb/'
        self.version = fullversion.rstrip('/')
        if self.branch == 'prerelease':
            self.baseurl = PRERELEASE

        return self.version
@@ -3,7 +3,6 @@
"""Classes and functions to build an AppImage."""

import os
import datetime
import glob
import subprocess
import shutil
@@ -12,7 +11,6 @@ import shlex
import tempfile
import hashlib
import requests
import magic
import loaih
@@ -29,7 +27,6 @@ class Collection(list):
        arch = [ x for x in arch if version.urls[x] != '-' ]
        self.extend([ Build(version, ar) for ar in arch ])

class BuildException(Exception): pass

class Build():
    """Builds a single version."""
@@ -39,12 +36,10 @@ class Build():
    LANGBASIC = [ 'en-GB' ]
    ARCHSTD = [ 'x86', 'x86_64' ]

    def __init__(self, version: loaih.Version, arch, debug=False):
        self.debug = debug
    def __init__(self, version: loaih.Version, arch):
        self.version = version
        self.tidy_folder = True
        self.verbose = True
        self.check_dependencies = False
        self.arch = arch
        self.short_version = str.join('.', self.version.version.split('.')[0:2])
        self.branch_version = self.version.branch
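For orientation, a minimal sketch of how these `Build` objects are driven, mirroring the flow in `loaih.script` further down; the query string and architecture list are placeholders, not values taken from this diff:

```python
import loaih.build

# Collection expands a query into one Build per architecture; the options
# removed from __init__ above are plain attributes set by the caller.
for appbuild in loaih.build.Collection('fresh', ['x86_64']):
    appbuild.language = 'basic'      # bundle en-GB only
    appbuild.offline_help = False    # skip the offline help packs
    appbuild.check()                 # skip versions that were already built
    appbuild.download()
    appbuild.build()
    appbuild.checksums()
```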
@@ -93,27 +88,6 @@ class Build():
    def calculate(self):
        """Calculate exclusions and other variables."""

        if self.verbose:
            print("--- Preliminary Phase ---")

        if self.debug and isinstance(shutil.which('apt'), str):
            # APT is found in path. We assume we can find dependencies.
            self.check_dependencies = True


            if self.verbose:
                print("Updating system packages cache.")
            # Updating package cache
            subprocess.run(['sudo', 'apt', 'update'], check=True, stdout=subprocess.DEVNULL)
            if self.verbose:
                print("Ensuring apt-file is installed and updated.")
            # Updating apt-file cache
            subprocess.run(['sudo', 'apt', 'install', 'apt-file', '-y'], check=True, stdout=subprocess.DEVNULL)
            subprocess.run(['sudo', 'apt-file', 'update'], check=True, stdout=subprocess.DEVNULL)
        else:
            print("CAUTION: your system seems not to include a working version of apt.\nThis will cause the AppImage to leverage system libraries when run.")
            self.check_dependencies = False

        if self.verbose:
            print("--- Calculate Phase ---")
@@ -171,6 +145,7 @@
        # was already built.
        name = self.appimagefilename
        url = self.storage_path.rstrip('/') + self.full_path + '/'
        matching = []
        try:
            if len(loaih.match_xpath(url, f"//a[contains(@href,'{name}')]/@href")) > 0:
                # Already built.
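A condensed sketch of the remote check above: `loaih.match_xpath` (the name comes from this diff) runs an XPath query against the HTML index of the storage URL and returns the hrefs that mention the AppImage file name. The helper below is illustrative, not part of the project:

```python
import loaih

def already_built(storage_path: str, full_path: str, name: str) -> bool:
    """Return True when the remote repository already lists this AppImage."""
    url = storage_path.rstrip('/') + full_path + '/'
    try:
        return len(loaih.match_xpath(url, f"//a[contains(@href,'{name}')]/@href")) > 0
    except Exception:
        # Unreachable index or network error: treat it as not built yet.
        return False
```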
@@ -182,6 +157,8 @@

        else:
            # Repo is local


            command = f"find {self.full_path} -name {self.appimagefilename}"
            res = subprocess.run(shlex.split(command),
                capture_output=True,
@@ -197,7 +174,7 @@
            print(f"Found requested AppImage: {self.appimagefilename}.")


    def download(self, compact=False):
    def download(self):
        """Downloads the contents of the URL as it was a folder."""

        if self.verbose:
@@ -217,27 +194,19 @@
        self.found = True

        # Identifying downloads
        self.tarballs = [ x for x in loaih.match_xpath(self.url, "//td/a/text()") if x.endswith('tar.gz') and 'deb' in x and self.version.version in x ]

        self.download_tarballs = []

        # Issue #5: manage a limited number of downloads and not the full set.
        if compact:
            self.download_tarballs = self.__select_tarballs__()
        else:
            self.download_tarballs = self.tarballs
        self.tarballs = [ x for x in loaih.match_xpath(self.url, "//td/a/text()") if x.endswith('tar.gz') and 'deb' in x ]

        # Create and change directory to the download location
        os.makedirs(self.download_path, exist_ok = True)
        os.chdir(self.download_path)
        for archive in self.download_tarballs:
        for archive in self.tarballs:
            # If the archive is already there, do not do anything.
            if os.path.exists(archive):
                continue

            # Download the archive
            try:
                self.__download_archive_debug__(archive)
                self.__download_archive__(archive)
            except Exception as error:
                print(f"Failed to download {archive}: {error}.")
@@ -248,12 +217,12 @@
    def build(self):
        """Building all the versions."""

        if self.found:
            return

        if self.verbose:
            print("--- Building Phase ---")

        if self.found:
            return

        # Preparation tasks
        self.appnamedir = os.path.join(self.builddir, self.appname)
        os.makedirs(self.appnamedir, exist_ok=True)
@@ -267,13 +236,6 @@

        # Build the requested version.
        self.__unpackbuild__()
        self.__prepare_contents__()
        if self.check_dependencies:
            if self.verbose:
                print("Searching for dependent libraries, it might take a while.")

            self.__missing_dependencies__()
        self.__finalize_build__()


    def checksums(self):
@@ -453,33 +415,9 @@
        with open(f"{file}.md5", 'w', encoding='utf-8') as checkfile:
            checkfile.write(f"{retval.hexdigest()} {os.path.basename(file)}")

    def __download_archive__(self, archive) -> str:
    def __download_archive__(self, archive):
        return self.__download__(self.url, archive)

    def __download_archive_debug__(self, archive) -> str:
        """Analyses the downloaded archive to prevent issues with unpacking."""
        break_control = 0
        testedfilename = ""
        while break_control <= 5:
            timenow = datetime.datetime.now()
            testedfilename, resource = self.__download_debug__(self.url, archive)
            mime = magic.Magic(mime=True)
            mimetype = mime.from_file(testedfilename)
            if mimetype == 'application/gzip':
                return testedfilename

            # On the contrary, we will dump a logfile, remove the download and
            # redo the download.
            with open(os.path.join(self.download_path, 'downloadfailure.log'), 'a') as logfile:
                logfile.write(f"{timenow.isoformat()};{resource.url};{mimetype}\n")

            os.unlink(testedfilename)
            break_control += 1

        # If it arrives here, 5 attempts to download the archive have failed.
        raise BuildException(f"All downloads failed for {archive}. Exiting.")


    def __download__(self, url: str, filename: str):
        basename = filename
        if '/' in filename:
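The removed debug helper accepted a download only when python-magic reported it as gzip. A standalone version of that check is sketched below; the `tarfile.is_tarfile` cross-check is my addition, not taken from the diff:

```python
import tarfile
import magic

def looks_like_tarball(path: str) -> bool:
    """True when the file is a real gzip archive that tarfile can open."""
    mimetype = magic.Magic(mime=True).from_file(path)  # same check as the diff
    return mimetype == 'application/gzip' and tarfile.is_tarfile(path)
```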
@@ -497,63 +435,40 @@
                    file.write(chunk)
        return filename

    def __download_debug__(self, url: str, filename: str) -> tuple[str, requests.Response]:
        basename = filename
        if '/' in filename:
            basename = filename.split('/')[-1]

        full_url = url
        if url.endswith('/'):
            # URL has to be completed with basename of filename
            full_url = url + basename

        with requests.get(full_url, stream=True, timeout=10) as resource:
            resource.raise_for_status()
            with open(filename, 'wb') as file:
                for chunk in resource.iter_content(chunk_size=8192):
                    file.write(chunk)
        return filename, resource

    def __select_tarballs__(self):
        retval = [ self.tarballs[0] ]
    def __unpackbuild__(self):
        # We start by filtering out tarballs from the list
        buildtarballs = [ self.tarballs[0] ]

        # Let's process standard languages and append results to the
        # buildtarball
        if self.language == 'basic':
            if self.offline_help:
                retval.extend([ x for x in self.tarballs if 'pack_en-GB' in x ])
                buildtarballs.extend([ x for x in self.tarballs if 'pack_en-GB' in x ])
            else:
                retval.extend([ x for x in self.tarballs if 'langpack_en-GB' in x])
                buildtarballs.extend([ x for x in self.tarballs if 'langpack_en-GB' in x])
        elif self.language == 'standard':
            for lang in Build.LANGSTD:
                if self.offline_help:
                    retval.extend([ x for x in self.tarballs if 'pack_' + lang in x ])
                    buildtarballs.extend([ x for x in self.tarballs if 'pack_' + lang in x ])
                else:
                    retval.extend([ x for x in self.tarballs if 'langpack_' + lang in x ])
                    buildtarballs.extend([ x for x in self.tarballs if 'langpack_' + lang in x ])
        elif self.language == 'full':
            if self.offline_help:
                # We need also all help. Let's replace buildtarball with the
                # whole bunch
                retval = self.tarballs
                buildtarballs = self.tarballs
            else:
                retval.extend([ x for x in self.tarballs if 'langpack' in x ])
                buildtarballs.extend([ x for x in self.tarballs if 'langpack' in x ])
        else:
            # Looping for each language in self.language
            for lang in self.language.split(","):
                if self.offline_help:
                    retval.extend([ x for x in self.tarballs
                    buildtarballs.extend([ x for x in self.tarballs
                        if 'pack' + lang in x ])
                else:
                    retval.extend([ x for x in self.tarballs
                    buildtarballs.extend([ x for x in self.tarballs
                        if 'langpack' + lang in x ])

        return retval


    def __unpackbuild__(self):
        # We start by filtering out tarballs from the list
        buildtarballs = self.__select_tarballs__()

        os.chdir(self.appnamedir)

        # Unpacking the tarballs
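Because removed and added lines are interleaved above, here is the selection logic in one piece as a hedged sketch; the function signature is mine, while the matching tokens (`pack_`, `langpack_`, `en-GB`) are the ones used in the diff:

```python
def select_tarballs(tarballs, language, offline_help, langstd):
    """Pick the tarballs to unpack: the main archive plus the requested packs."""
    selected = [tarballs[0]]              # main installer archive always included
    if language == 'basic':
        token = 'pack_en-GB' if offline_help else 'langpack_en-GB'
        selected.extend(x for x in tarballs if token in x)
    elif language == 'standard':
        for lang in langstd:
            token = ('pack_' if offline_help else 'langpack_') + lang
            selected.extend(x for x in tarballs if token in x)
    elif language == 'full':
        if offline_help:
            return list(tarballs)         # everything, help packs included
        selected.extend(x for x in tarballs if 'langpack' in x)
    else:
        for lang in language.split(','):
            token = ('pack' if offline_help else 'langpack') + lang
            selected.extend(x for x in tarballs if token in x)
    return selected
```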
@@ -564,17 +479,13 @@
            subprocess.run(shlex.split(
                f"tar xzf {self.download_path}/{archive}"), check=True)

    def __prepare_contents__(self):
        # create appimagedir
        if self.verbose:
            print("---- Preparing the build ----")
        self.appimagedir = os.path.join(self.appnamedir, self.appname + '.AppDir')
        self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir')
        os.makedirs(self.appimagedir, exist_ok = True)

        # At this point, let's decompress the deb packages
        if self.verbose:
            print("Unpacking main archives")

        subprocess.run(shlex.split(
            r"find .. -iname '*.deb' -exec dpkg -x {} . \;"
        ), cwd=self.appimagedir, check=True)
@@ -587,9 +498,6 @@
        ), cwd=self.appimagedir, check=True)

        # Changing desktop file
        if self.verbose:
            print("Preparing .desktop file.")

        subprocess.run(shlex.split(
            r"find . -iname startcenter.desktop -exec cp {} . \;"
        ), cwd=self.appimagedir, check=True)
@@ -599,21 +507,11 @@
            r"startcenter.desktop"
        ), cwd=self.appimagedir, check=False)

        if self.verbose:
            print("Preparing icon file.")
        subprocess.run(shlex.split(
            r"find . -name '*startcenter.png' -path '*hicolor*48x48*' " +
            r"-exec cp {} . \;"
        ), cwd=self.appimagedir, check=True)

        # Finding path to main executable
        cmd = subprocess.run(shlex.split(
            r"find -iname soffice.bin -print"
        ), cwd=self.appimagedir, check = True, capture_output=True)
        self.main_executable = os.path.abspath(os.path.join(
            self.appimagedir,
            cmd.stdout.strip().decode('utf-8')))

        # Find the name of the binary called in the desktop file.
        binaryname = ''
        with open(
@@ -625,165 +523,63 @@
                binaryname = line.split('=')[-1].split(' ')[0]
                # Exit at the first match
                break


        #binary_exec = subprocess.run(shlex.split(r"awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'"), cwd=self.appimagedir, text=True, encoding='utf-8')
        #binaryname = binary_exec.stdout.strip("\n")

        # Creating a soft link so the executable in the desktop file is present

        bindir=os.path.join(self.appimagedir, 'usr', 'bin')
        os.makedirs(bindir, exist_ok = True)
        subprocess.run(shlex.split(
            r"find ../../opt -iname soffice.bin -path '*program*' " +
            r"find ../../opt -iname soffice -path '*program*' " +
            r"-exec ln -sf {} ./%s \;" % binaryname
        ), cwd=bindir, check=True)

    def __missing_dependencies__(self):
        """Finds and copy in the appimagedir any missing libraries."""
        # If the system permits it, we leverage lddcollect
        # to find the packages that contain .so dependencies in the main build.
        import lddcollect
        # We first process the ELF
        raw = lddcollect.process_elf(self.main_executable, verbose = False, dpkg = True)

        # If all works as expected, we obtain a tuple of:
        # (debian_packages, all_libraries, files_not_found)
        debian_packages = raw[0]
        not_found = raw[2]

        if len(debian_packages) != 0:
            # Creating temporary folders
            debs = [ x.split(':')[0] for x in debian_packages ]
            downloadpath = os.path.abspath(os.path.join(self.builddir, 'dependencies'))
            os.makedirs(downloadpath)


            if self.verbose:
                print("Downloading missing dependencies, please wait.")

            # Let's try to find and install also other libraries
            additional = list(dict.fromkeys([ Helpers.lib_to_deb(x) for x in not_found ]))
            debs.extend(additional)

            # It seems the download command does not download dependencies of
            # the packages.
            if self.verbose:
                print("Constructing the dependency tree.")

            for deb in debian_packages:
                debs.extend(Helpers.deb_dependencies(deb))

            # Re-cleaning up the dependency tree
            debs = list(dict.fromkeys(debs))

            # We download the missing dependencies leveraging apt
            subprocess.run(shlex.split(
                r"apt download " + " ".join(debs)
            ), cwd=downloadpath, check=True)

            # then we install them inside a temporary path
            temporary = os.path.abspath(os.path.join(downloadpath, 'temp'))
            os.makedirs(temporary)
            subprocess.run(shlex.split(
                r"find " + downloadpath + r" -iname \*.deb -exec dpkg -x {} " + temporary + r" \;"
            ), cwd=self.builddir, check=True)

            # We are finally copying the .so files in the same path as main_executable
            libdirs = [ 'lib/x86_64-linux-gnu', 'usr/lib/x86_64-linux-gnu' ]
            for libdir in libdirs:
                fulllibdir = os.path.abspath(os.path.join(temporary, libdir))
                subprocess.run(shlex.split(
                    f"cp -Ra {fulllibdir}/. {os.path.dirname(self.main_executable)}/"
                ), cwd=temporary, check=True)

        if self.debug:
            with open(os.path.abspath(os.path.join(self.storage_path, 'dependencies.lst')), 'w', encoding="utf-8") as deplist:
                deplist.write("\n".join(debs))

    def __finalize_build__(self):
        if self.verbose:
            print("Finalizing build...")
        # Cleaning up AppDir
        cleanup_dirs = [ 'etc', 'lib', 'lib64', 'usr/lib', 'usr/local' ]
        for local in cleanup_dirs:
            shutil.rmtree(os.path.abspath(os.path.join(self.appimagedir, local)), ignore_errors=True)

        # Download AppRun from github
        apprunurl = r"https://github.com/AppImage/AppImageKit/releases/"
        apprunurl += f"download/continuous/AppRun-{self.arch}"
        dest = os.path.join(self.appimagedir, 'AppRun')
        self.__download__(apprunurl, dest)
        os.chmod(dest, 0o755)


        # Dealing with extra options
        buildopts = []
        if self.sign:
            buildopts.append('--sign')


        # adding zsync build if updatable
        if self.updatable:
            buildopts.append(f"-u 'zsync|{self.zsyncfilename}'")


        buildopts_str = str.join(' ', buildopts)


        # Build the number-specific build
        if self.verbose:
            print("---- Start building ----")
            subprocess.run(shlex.split(
                f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
                f"{self.appimagedir}"
                f"./{self.appname}.AppDir/"
            ), env={ "VERSION": self.appversion }, check=True)
            print("---- End building ----")
        else:
            subprocess.run(shlex.split(
                f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
                f"{self.appimagedir}"
                f"./{self.appname}.AppDir/"
            ), env={ "VERSION": self.appversion }, stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL, check=True)


        if self.verbose:
            print(f"Built AppImage version {self.appversion}")


        # Cleanup phase, before new run.
        for deb in glob.glob(self.appnamedir + '/*.deb'):
            os.remove(deb)
        subprocess.run(shlex.split(
            r"find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+"
        ), check=True)


        self.built = True

    def __del__(self):
        """Destructor"""
        if not self.debug:
            # Cleaning up build directory
            shutil.rmtree(self.builddir)


class Helpers:

    @staticmethod
    def deb_dependencies(package_name):
        """Returns the array of the dependencies of that package."""

        # First pass: find dependency of that package in raw output
        pass1 = subprocess.Popen(shlex.split(
            f"apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends {package_name}"
        ), stdout=subprocess.PIPE)

        # Second pass: only grep interesting lines.
        pass2 = subprocess.Popen(shlex.split(
            r"grep '^\w'"
        ), stdin=pass1.stdout, stdout=subprocess.PIPE, encoding='utf-8')
        stdout = pass2.communicate()[0]

        return stdout.strip().split("\n")

    @staticmethod
    def lib_to_deb(libraryname):
        """Uses system tools to identify the missing package."""

        libsearch = subprocess.run(shlex.split(
            f"sudo apt-file find -lx {libraryname}$"
        ), check=True, capture_output=True)
        candidate = [ x for x in libsearch.stdout.decode('utf-8').split('\n') if 'lib' in x ][0]
        return candidate
        # Cleaning up build directory
        shutil.rmtree(self.builddir)
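`Helpers.deb_dependencies` pipes `apt-cache depends` through `grep '^\w'`. The same result can be obtained with a single subprocess call and the filtering done in Python; this is a sketch under that assumption, not the project's code:

```python
import shlex
import subprocess

def deb_dependencies(package_name: str) -> list[str]:
    """Recursively list the Debian packages a package depends on."""
    result = subprocess.run(shlex.split(
        "apt-cache depends --recurse --no-recommends --no-suggests "
        "--no-conflicts --no-breaks --no-replaces --no-enhances "
        f"--no-pre-depends {package_name}"),
        check=True, capture_output=True, encoding='utf-8')
    # Package names start at column zero; their attributes are indented.
    return [line for line in result.stdout.splitlines()
            if line and not line[0].isspace()]
```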
@@ -9,12 +9,10 @@ import json
import click
import yaml
import loaih
import loaih.version
import loaih.build


@click.group()
@click.version_option(loaih.version.version)
def cli():
    """Helps with command line commands."""
@@ -59,9 +57,8 @@ def getversion(query, jsonout, default_to_current):
@click.option('--updatable', '-u', is_flag=True, default=False, help="Create an updatable AppImage (compatible with zsync2). Default: not updatable")
@click.option('--download-path', '-d', default='./downloads', type=str, help="Path to the download folder. Default: ./downloads")
@click.option('--repo-path', '-r', default='.', type=str, help="Path to the final storage of the AppImage. Default: current directory")
@click.option('--debug', 'debug', is_flag=True, default=False, help="Activate debug options.")
@click.argument('query')
def build(arch, language, offline, portable, updatable, download_path, repo_path, check, checksums, sign, keep, query, debug):
def build(arch, language, offline, portable, updatable, download_path, repo_path, check, checksums, sign, keep, query):
    """Builds an Appimage with the provided options."""

    # Multiple query support
@@ -90,7 +87,6 @@ def build(arch, language, offline, portable, updatable, download_path, repo_path
    for myquery in queries:
        for appbuild in loaih.build.Collection(myquery, arches):
            # Configuration phase
            appbuild.debug = debug
            appbuild.tidy_folder = False
            appbuild.language = language
            appbuild.offline_help = offline
@@ -106,7 +102,7 @@
            if check:
                appbuild.check()

            appbuild.download(compact = True)
            appbuild.download()
            appbuild.build()
            if checksums:
                appbuild.checksums()
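For completeness, the slimmed-down `build` command can be exercised from a test with Click's `CliRunner`; only options visible in this diff are used, and the trailing query string is a placeholder:

```python
from click.testing import CliRunner

import loaih.script

runner = CliRunner()
result = runner.invoke(loaih.script.cli, [
    'build',
    '--updatable',                    # zsync-compatible AppImage
    '--download-path', './downloads',
    '--repo-path', '.',
    'fresh',                          # placeholder query
])
print(result.exit_code, result.output)
```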
@@ -1,4 +1 @@
#!/usr/bin/env python3
# encoding: utf-8

version = "1.4.0rc1"
__version__ = "1.3.3"