Compare commits


No commits in common. "main" and "signature" have entirely different histories.

21 changed files with 652 additions and 1647 deletions


@@ -1,33 +0,0 @@
---
kind: pipeline
type: docker
name: default
steps:
- name: compile
image: python
commands:
- pip install wheel
- python setup.py bdist_wheel
- name: release
image: plugins/gitea-release
settings:
api_key:
from_secret: loaih-deploy
base_url: https://git.libreitalia.org
files: dist/*.whl
checksum: md5
draft: true
- name: publish
image: plugins/pypi
settings:
username: __token__
password:
from_secret: pypi
trigger:
event:
- tag
- push

.gitignore (vendored, 2 changes)

@@ -1,6 +1,4 @@
venv
test
build
dist
loaih.egg-info
**/__pycache__


@@ -1,8 +0,0 @@
# vim:sts=4:sw=4
FROM python:3.9-slim-bullseye
RUN mkdir /build && \
pip install loaih
WORKDIR /build
ENTRYPOINT [ "/usr/local/bin/loaih" ]
CMD [ "--help" ]


@@ -1,19 +0,0 @@
# vim:sts=4:sw=4
FROM python:3.9-slim-bullseye
RUN mkdir /build && \
apt update && apt install -y git && \
rm -rf /var/lib/apt/lists/* && \
cd /root && \
git clone https://git.libreitalia.org/libreitalia/loaih.git && \
cd loaih && git checkout dependencies && \
python3 -m venv venv && \
. venv/bin/activate && \
pip install build && \
python3 -m build && \
pip install dist/loaih*.whl && \
deactivate && \
ln -sf /root/loaih/venv/bin/loaih /usr/local/bin/loaih
WORKDIR /build
ENTRYPOINT [ "/usr/local/bin/loaih" ]
CMD [ "--help" ]


@@ -1,18 +0,0 @@
# LibreOffice AppImage Helper - `loaih` #
LibreOffice AppImage Helper is an enhanced Python port of [previous work
by Antonio Faccioli](https://github.com/antoniofaccioli/libreoffice-appimage).
It helps build a LibreOffice AppImage from officially released .deb files,
with a number of options.
## Getting options and help ##
You can ask the app for information on how to use it:
$ loaih --help
$ loaih getversion --help
$ loaih build --help
$ loaih batch --help
For any other information needed, please visit the [wiki of the
project](https://git.libreitalia.org/libreitalia/loaih/wiki).
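The same workflow can also be driven from Python. A minimal sketch, assuming the `Build` class defined in `loaih/__init__.py` further down in this diff (the query, language and paths are only examples; the paths are taken from the bundled configuration files):

import loaih

# Hypothetical library usage mirroring scripts/loaih-build in this diff.
build = loaih.Build('fresh', ['x86_64'])    # named query and target architectures
build.language = 'it'                       # example language selection
build.offline_help = True
build.storage_path = '/srv/http/appimage.sys42.eu'  # example final storage
build.download_path = '/var/tmp/downloads'
build.calculate()   # resolve names, versions and paths
build.check()       # skip work if the AppImage already exists
build.download()
build.build()
build.checksums()
build.publish()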


@@ -11,16 +11,10 @@ if [[ ${retval} -ne 0 ]]; then
# for the sake of consistency, let's make the check_updates.sh script
# executable
chmod +x check_updates.sh
if [[ -d venv ]]; then
source venv/bin/activate
fi
pip3 uninstall -y loaih
# build the actual toolkit
python3 -m build -w
pip3 install dist/loaih*.whl; rv=$?
if [[ -d venv ]]; then
deactivate
fi
python3 setup.py bdist_wheel
pip3 install dist/*.whl; rv=$?
if [[ ${rv} -eq 0 ]]; then
# cleanup


@@ -1,69 +0,0 @@
---
data:
repo: https://appimages.libreitalia.org
remote_host: ciccio
remote_path: /var/lib/nethserver/vhost/appimages
download: /var/tmp/downloads
force: false
sign: true
builds:
- query: daily
language: basic
offline_help: false
portable: false
- query: daily
language: basic
offline_help: true
portable: false
- query: daily
language: basic
offline_help: false
portable: true
- query: daily
language: basic
offline_help: true
portable: true
- query: daily
language: standard
offline_help: false
portable: false
- query: daily
language: standard
offline_help: true
portable: false
- query: daily
language: standard
offline_help: false
portable: true
- query: daily
language: standard
offline_help: true
portable: true
- query: daily
language: full
offline_help: false
portable: false
- query: daily
language: full
offline_help: true
portable: false
- query: daily
language: full
offline_help: false
portable: true
- query: daily
language: full
offline_help: true
portable: true


@@ -1,69 +1,67 @@
---
data:
repo: https://appimages.libreitalia.org
remote_host: ciccio
remote_path: /var/lib/nethserver/vhost/appimages
repo: /srv/http/appimage.sys42.eu
download: /var/tmp/downloads
force: false
sign: true
force: no
sign: yes
builds:
- query: fresh
language: basic
offline_help: false
portable: false
offline_help: no
portable: no
- query: fresh
language: basic
offline_help: true
portable: false
offline_help: yes
portable: no
- query: fresh
language: basic
offline_help: false
portable: true
offline_help: no
portable: yes
- query: fresh
language: basic
offline_help: true
portable: true
offline_help: yes
portable: yes
- query: fresh
language: standard
offline_help: false
portable: false
offline_help: no
portable: no
- query: fresh
language: standard
offline_help: true
portable: false
offline_help: yes
portable: no
- query: fresh
language: standard
offline_help: false
portable: true
offline_help: no
portable: yes
- query: fresh
language: standard
offline_help: true
portable: true
offline_help: yes
portable: yes
- query: fresh
language: full
offline_help: false
portable: false
offline_help: no
portable: no
- query: fresh
language: full
offline_help: true
portable: false
offline_help: yes
portable: no
- query: fresh
language: full
offline_help: false
portable: true
offline_help: no
portable: yes
- query: fresh
language: full
offline_help: true
portable: true
offline_help: yes
portable: yes

loaih/__init__.py (new file, 344 lines)

@@ -0,0 +1,344 @@
#!/usr/bin/env python3
import urllib.request
import loaih.versions as versions
from lxml import etree
import tempfile, os, sys, glob, subprocess, shutil, re
class Build(object):
LANGSTD = [ 'ar', 'de', 'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt', 'pt-BR', 'ru', 'zh-CN', 'zh-TW' ]
LANGBASIC = [ 'en-GB' ]
ARCHSTD = [ u'x86', u'x86_64' ]
def __init__(self, query, arch):
"""Build all versions that can be found in the indicated repo."""
self.query = query
self.queried_name = False if '.' in self.query else True
self.arch = arch
self.url = {}
self.language = 'basic'
self.offline_help = False
self.portable = False
self.updatable = True
self.sign = False
self.storage_path = '/srv/http/appimage.sys42.eu'
self.download_path = '/var/tmp/downloads'
# Specific build version
self.appversion = ''
self.genappversion = ''
self.appimagefilename = {}
self.genappimagefilename = {}
# Getting versions and so on
v = versions.BuildVersion(self.query)
# Creating a tempfile
self.builddir = tempfile.mkdtemp()
self.tarballs = {}
self.appname = 'LibreOffice' if not self.query == 'daily' and not self.query == 'prerelease' else 'LibreOfficeDev'
self.version = v.version
self.url = v.basedirurl
self.built = { u'x86': False, u'x86_64': False }
# Preparing the default for the relative path on the storage for
# different versions.
# The path will be evaluated as part of the check() function, as it is
# understood that the storage_path can be changed before that phase.
self.relative_path = []
self.full_path = ''
def calculate(self):
"""Calculate exclusions and other variables."""
# Incompatibilities - if portable and updatable are asked together,
# only portable will be built.
if self.portable and self.updatable:
print("Upgradable and portable options were required together. Building only portable.")
self.updatable = False
if self.updatable and not self.queried_name:
# If the queried version was a numbered version, it doesn't make sense
# to build an updatable version.
self.updatable = False
# Mandate to the private function to calculate the full_path available
# for the storage and the checks.
self.__calculate_full_path__()
# Building expected AppImageName
self.languagepart = "."
if ',' in self.language:
self.languagepart += self.language.replace(',', '-')
else:
self.languagepart += self.language
self.helppart = '.help' if self.offline_help else ''
# If the build was called by query name, build from the latest release available, but under the most generic name
self.appversion = self.version + self.languagepart + self.helppart
myver = str.join('.', self.version.split('.')[0:2])
self.genappversion = myver + self.languagepart + self.helppart
for arch in Build.ARCHSTD:
self.appimagefilename[arch] = self.appname + '-' + self.appversion + f'-{arch}.AppImage'
self.genappimagefilename[arch] = self.appname + '-' + self.genappversion + f'-{arch}.AppImage'
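# Illustrative example (not part of the original code): with query='fresh'
# resolved to version 7.4.5.1 (hypothetical), language='it' and
# offline_help=True, the names computed above become:
#   appversion                    = '7.4.5.1.it.help'
#   genappversion                 = '7.4.it.help'
#   appimagefilename['x86_64']    = 'LibreOffice-7.4.5.1.it.help-x86_64.AppImage'
#   genappimagefilename['x86_64'] = 'LibreOffice-7.4.it.help-x86_64.AppImage'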
def check(self):
"""Checking if the requested AppImage has been already built."""
for arch in self.arch:
# For generalized builds, we need to check whether a .ver file exists
# and contains the specific version found.
print("Debug: searching for {file}".format(file = self.genappimagefilename[arch] + '.ver'))
res = subprocess.run("find {path} -name {appimage}'".format(
path = self.full_path,
appimage = self.genappimagefilename[arch] + '.ver'
), shell=True, capture_output=True, env={ "LC_ALL": "C" })
if "No such file or directory" in res.stderr.decode('utf-8'):
# Folder is not existent: so the version was not built
# Build stays false, and we go to the next arch
continue
if res.stdout:
# All good, the command was executed fine.
for file in res.stdout.decode('utf-8').strip('\n').split('\n'):
if self.version in open(file, 'r').read():
self.built[arch] = True
print("Debug: searching for {file}".format(file = self.appimagefilename[arch]))
res = subprocess.run("find {path} -name '{appimage}'".format(
path = self.full_path,
appimage = self.appimagefilename[arch]
), shell=True, capture_output=True)
if res.stdout:
if len(res.stdout.decode('utf-8').strip('\n')) > 1:
self.built[arch] = True
if self.built[arch]:
print("The requested AppImage already exists on storage for {arch}. I'll skip downloading, building and moving the results.".format(arch=arch))
def __calculate_full_path__(self):
"""Calculate relative path of the build, based on internal other variables."""
if len(self.relative_path) == 0:
if self.query == 'daily':
self.relative_path.append('daily')
elif self.query == 'prerelease':
self.relative_path.append('prerelease')
# Not the same check, an additional one
if self.portable:
self.relative_path.append('portable')
fullpath_arr = self.storage_path.split('/')
# Joining relative path only if it is not null
if len(self.relative_path) > 0:
fullpath_arr.extend(self.relative_path)
self.full_path = re.sub(r"/+", '/', str.join('/', fullpath_arr))
def download(self):
"""Downloads the contents of the URL as it was a folder."""
for arch in self.arch:
# Checking if a valid path has been provided
if self.url[arch] == '-':
print("No build has been provided for the requested AppImage for {arch}. Continue with other options.".format(arch = arch))
# Fake it as already built, so as to skip other checks.
self.built[arch] = True
continue
if self.built[arch]:
print("A build for {arch} was already found. Skipping specific packages.".format(arch = arch))
continue
contents = etree.HTML(urllib.request.urlopen(self.url[arch]).read()).xpath("//td/a")
self.tarballs[arch] = [ x.text for x in contents if x.text.endswith('tar.gz') and 'deb' in x.text ]
tarballs = self.tarballs[arch]
maintarball = tarballs[0]
os.makedirs(self.download_path, exist_ok = True)
os.chdir(self.download_path)
for archive in tarballs:
# If the archive is already there, do not do anything.
if os.path.exists(os.path.join(self.download_path, archive)):
print("Archive %s is already there! Sweet" % archive)
continue
# Download the archive
try:
urllib.request.urlretrieve(self.url[arch] + archive, archive)
print("Got %s." % archive)
except Exception:
print("Failed to download {archive}.".format(archive = archive))
def build(self):
"""Building all the versions."""
# We have 4 builds to do:
# * standard languages, no help
# * standard languages + offline help
# * all languages, no help
# * all languages + offline help
for arch in self.arch:
if self.built[arch]:
# Already built for arch or path not available. User has already been warned.
continue
# Preparation tasks
self.appnamedir = os.path.join(self.builddir, self.appname)
self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir')
os.makedirs(self.appimagedir, exist_ok = True)
# And then cd to the appname folder.
os.chdir(self.appnamedir)
# Download appimagetool from github
appimagetoolurl = "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-{arch}.AppImage".format(arch = arch)
urllib.request.urlretrieve(appimagetoolurl, 'appimagetool')
os.chmod('appimagetool', 0o755)
# Build the requested version.
if self.queried_name and not self.portable:
# If it is portable, do not generate a generalized version
self.__unpackbuild__(arch, True)
self.__unpackbuild__(arch)
def __unpackbuild__(self, arch, generalize = False):
if generalize and self.portable:
# Doesn't particularly make sense to build a generic portable
# version. Just skipping the specific generic build
return
# We start by filtering out tarballs from the list
buildtarballs = [ self.tarballs[arch][0] ]
# Let's process standard languages and append results to the
# buildtarball
if self.language == 'basic':
if self.offline_help:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'pack_en-GB' in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack_en-GB' in x])
elif self.language == 'standard':
for lang in Build.LANGSTD:
if self.offline_help:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack_' + lang) in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack_' + lang) in x ])
elif self.language == 'full':
if self.offline_help:
# We need also all help. Let's replace buildtarball with the
# whole bunch
buildtarballs = self.tarballs[arch]
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack' in x ])
else:
# Looping for each language in self.language
for lang in self.language.split(","):
if self.offline_help:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack_' + lang) in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack_' + lang) in x ])
# Unpacking the tarballs
for archive in buildtarballs:
subprocess.run("tar xzf {folder}/{archive}".format(folder = self.download_path, archive = archive), shell=True)
os.chdir(self.appnamedir)
os.makedirs(self.appimagedir, exist_ok = True)
# At this point, let's decompress the deb packages
subprocess.run("find .. -iname '*.deb' -exec dpkg -x {} . \;", shell=True, cwd=self.appimagedir)
if self.portable:
shortversion = str.join('.', self.version.split('.')[:3])
subprocess.run("find . -type f -iname 'bootstraprc' -exec sed -i 's|^UserInstallation=.*|UserInstallation=\$SYSUSERCONFIG/libreoffice/%s|g' {} \+" % shortversion, shell=True, cwd=self.appimagedir)
# Changing desktop file
subprocess.run("find . -iname startcenter.desktop -exec cp {} . \;", shell=True, cwd=self.appimagedir)
subprocess.run("sed -i -e 's:^Name=.*$:Name=%s:' startcenter.desktop" % self.appname, shell=True, cwd=self.appimagedir)
subprocess.run("find . -name '*startcenter.png' -path '*hicolor*48x48*' -exec cp {} . \;", shell=True, cwd=self.appimagedir)
# Find the name of the binary called in the desktop file.
binaryname = subprocess.check_output("awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'", shell=True, cwd=self.appimagedir).decode('utf-8').strip('\n')
bindir=os.path.join(self.appimagedir, 'usr', 'bin')
os.makedirs(bindir, exist_ok = True)
subprocess.run("find ../../opt -iname soffice -path '*program*' -exec ln -sf {} ./%s \;" % binaryname, shell=True, cwd=bindir)
# Download AppRun from github
apprunurl = "https://github.com/AppImage/AppImageKit/releases/download/continuous/AppRun-{arch}".format(arch = arch)
dest = os.path.join(self.appimagedir, 'AppRun')
urllib.request.urlretrieve(apprunurl, dest)
os.chmod(dest, 0o755)
# Building app
if self.updatable:
# Updatable makes sense only for generic images (fresh, still,
# daily). If the request was for a specific version, don't build an
# updatable version.
# zsync name was generated already
# Dealing with extra options
buildopts = []
if self.sign:
buildopts.append('--sign')
# If asked to do a generalized build:
if generalize:
subprocess.run("VERSION={version} ./appimagetool {buildopts} -u 'zsync|{zsync}' -v ./{appname}.AppDir/".format(version = self.genappversion, buildopts = str.join(' ', buildopts), zsync = self.genappimagefilename[arch] + '.zsync', appname = self.appname), shell=True)
# Build version file management
with open(self.genappimagefilename[arch] + '.ver', 'w') as v:
v.write(self.version)
else:
subprocess.run("VERSION={version} ./appimagetool {buildopts} -u 'zsync|{zsync}' -v ./{appname}.AppDir/".format(version = self.appversion, buildopts = str.join(' ', buildopts), zsync = self.appimagefilename[arch] + '.zsync', appname = self.appname), shell=True)
else:
if generalize:
subprocess.run("VERSION={version} ./appimagetool {buildopts} -v ./{appname}.AppDir/".format(version = self.genappversion, buildopts = str.join(' ', buildopts), appname = self.appname), shell=True)
with open(self.genappimagefilename[arch] + '.ver', 'w') as v:
v.write(self.version)
else:
subprocess.run("VERSION={version} ./appimagetool {buildopts} -v ./{appname}.AppDir/".format(version = self.appversion, buildopts = str.join(' ', buildopts), appname = self.appname), shell=True)
print("Built AppImage version {version}".format(version = self.appversion))
# Cleanup phase, before new run.
for deb in glob.glob(self.appnamedir + '/*.deb'):
os.remove(deb)
subprocess.run("find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+", shell=True)
def checksums(self):
"""Create checksums of the built versions."""
if all(self.built.values()):
# All checksums are already created.
return
# Otherwise, checksums are regenerated (overwriting any existing ones),
# but only for the packages that were actually built.
os.chdir(self.appnamedir)
for appimage in glob.glob('*.AppImage*'):
if appimage.endswith('.ver'):
# Skipping checksums for .ver files.
continue
# See if a checksum already exists
if not os.path.exists(appimage + '.md5'):
subprocess.run("md5sum {appimage} > {appimage}.md5".format(appimage = appimage), shell=True)
def publish(self):
"""Moves built versions to definitive storage."""
if all(self.built.values()):
# All files are already present in the full_path
return
os.chdir(self.appnamedir)
# Forcing creation of subfolders, in case there is a new build
os.makedirs(self.full_path, exist_ok = True)
subprocess.run("find . -iname '*.AppImage*' -exec cp -f {} %s \;" % self.full_path, shell=True)
def __del__(self):
"""Destructor"""
# Cleaning up build directory
shutil.rmtree(self.builddir)

loaih/versions.py (new file, 93 lines)

@@ -0,0 +1,93 @@
#!/usr/bin/env python
# encoding: utf-8
import urllib.request
from lxml import etree
from packaging.version import parse as parse_version
class BuildVersion(object):
ARCHIVE = "https://downloadarchive.documentfoundation.org/libreoffice/old/"
RELEASE = "https://download.documentfoundation.org/libreoffice/stable/"
DAILY = "https://dev-builds.libreoffice.org/daily/master/Linux-rpm_deb-x86_64@tb87-TDF/"
PRERELEASE = "https://dev-builds.libreoffice.org/pre-releases/deb/x86_64/"
def __init__(self, query):
self.query = query
self.version = ''
self.basedirurl = {}
# Parsing the query input.
if '.' in self.query:
# Numbered self.version. Let's check it is a 4 dotted release
if len(self.query.split('.')) == 4:
self.version = self.query
else:
# If not 4 dotted, let's search for the 4 dotted version
self.version = self.__getlatestrel(self.query)
self.basedirurl = self.__getbaseurl(self.version)
else:
# Named version queries (e.g. 'fresh', 'still', 'daily', 'prerelease').
a = self.__getbranchrel(self.query)
self.version = a['version']
self.basedirurl = a['basedirurl']
def __getlatestrel(self, basever):
"""Search in downloadarchive for the latest version matching baseversion."""
versionlist = etree.HTML(urllib.request.urlopen(BuildVersion.ARCHIVE).read()).xpath('//td/a')
# Getting a more polished matching list
cleanlist = list(dict.fromkeys([x.text.strip('/') for x in versionlist if x.text.startswith(basever)]))
# Sorting, then returning the last version
return sorted(cleanlist)[-1]
def __getbranchrel(self, branch):
"""Based on branch names, get the release number."""
basedirurl = {}
version = ''
if branch == 'daily':
# The daily builds are mostly distinguished by the day of the build
# (the official version number is constant).
# The last built version is the next-to-last version [-2] on the page.
fulldailypath = etree.HTML(urllib.request.urlopen(BuildVersion.DAILY).read()).xpath('//td/a')[-2].text
dailyversion = fulldailypath.split('_')[0].replace('-', '')
newurl = str.join('/', [ BuildVersion.DAILY, fulldailypath, '' ])
basedirurl = { u'x86_64': newurl, u'x86': '-' }
version = etree.HTML(urllib.request.urlopen(newurl).read()).xpath('//td/a')[1].text.split('_')[1]
return { 'version': version + '-' + dailyversion, 'basedirurl': basedirurl }
if branch == 'prerelease':
version = etree.HTML(urllib.request.urlopen(BuildVersion.PRERELEASE).read()).xpath('//td/a')[1].text.split('_')[1]
basedirurl = { u'x86': '-', u'x86_64': BuildVersion.PRERELEASE }
return { 'version': version, 'basedirurl': basedirurl }
# Stable releases.
versions = etree.HTML(urllib.request.urlopen(BuildVersion.RELEASE).read()).xpath('//td/a')
index = 1
if branch == 'still':
index = -2
elif branch == 'fresh':
index = -1
version = self.__getlatestrel(versions[index].text.strip('/'))
return { 'version': version, 'basedirurl': self.__getbaseurl(version) }
def __getbaseurl(self, version):
"""Returns the links based on the numeric version."""
basedirurl = {}
url = BuildVersion.ARCHIVE + '/' + version + '/deb/'
# x86 binaries are no longer offered from 6.3.0 onwards.
if parse_version(version) < parse_version('6.3.0'):
basedirurl[u'x86'] = url + 'x86/'
else:
basedirurl[u'x86'] = '-'
basedirurl[u'x86_64'] = url + 'x86_64/'
return basedirurl
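For reference, a minimal usage sketch of the class above (the resolved version number is illustrative, not an actual lookup):

from loaih.versions import BuildVersion

v = BuildVersion('fresh')       # named query, resolved against the stable download page
print(v.version)                # e.g. '7.4.5.1' (hypothetical)
print(v.basedirurl['x86_64'])   # archive URL ending in .../deb/x86_64/
print(v.basedirurl['x86'])      # '-' for releases from 6.3.0 onwards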


@@ -1,69 +0,0 @@
---
data:
repo: https://appimages.libreitalia.org
remote_host: ciccio
remote_path: /var/lib/nethserver/vhost/appimages
download: /var/tmp/downloads
force: false
sign: true
builds:
- query: prerelease
language: basic
offline_help: false
portable: false
- query: prerelease
language: basic
offline_help: true
portable: false
- query: prerelease
language: basic
offline_help: false
portable: true
- query: prerelease
language: basic
offline_help: true
portable: true
- query: prerelease
language: standard
offline_help: false
portable: false
- query: prerelease
language: standard
offline_help: true
portable: false
- query: prerelease
language: standard
offline_help: false
portable: true
- query: prerelease
language: standard
offline_help: true
portable: true
- query: prerelease
language: full
offline_help: false
portable: false
- query: prerelease
language: full
offline_help: true
portable: false
- query: prerelease
language: full
offline_help: false
portable: true
- query: prerelease
language: full
offline_help: true
portable: true


@@ -1,56 +0,0 @@
# vim:sts=4:sw=4
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[project]
name = "loaih"
dynamic = ["version"]
authors = [
{ name = "Emiliano Vavassori", email = "syntaxerrormmm@gmail.com" },
]
description = "LOAIH - LibreOffice AppImage Helpers, help build a LibreOffice AppImage"
readme = "README.md"
license = "MIT"
requires-python = ">= 3.6"
dependencies = [
"click",
"lddcollect",
"lxml",
"python-magic",
"pyyaml",
"requests"
]
classifiers = [
"Development Status :: 5 - Production/Stable",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: System Administrators",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: POSIX :: Linux",
"Topic :: Office/Business",
"Topic :: Software Development :: Build Tools",
"Topic :: Software Development :: Quality Assurance",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: User Interfaces"
]
[project.scripts]
loaih = "loaih.script:cli"
[project.urls]
Homepage = "https://git.libreitalia.org/LibreItalia/loaih/"
[tool.hatch.version]
path = "src/loaih/version.py"
[tool.hatch.build.targets.sdist]
include = [
"src/loaih",
]

scripts/loaih-build (new file, 89 lines)

@@ -0,0 +1,89 @@
#!/usr/bin/env python
# encoding: utf-8
import click
import yaml
import loaih
@click.command()
@click.option('-a', '--arch', 'arch', type=click.Choice(['x86', 'x86_64', 'all'], case_sensitive=False), default='all', help="Build the AppImage for a specific architecture. If no architecture is specified, the process will build for both architectures (if available). Default: all")
@click.option('-c/-C', '--check/--no-check', 'check', default=True, help="Check whether the queried version already exists in the final storage. Default: check")
@click.option('-d', '--download-path', 'download_path', default = '/var/tmp/downloads', type=str, help="Path to the download folder. Default: /var/tmp/downloads")
@click.option('-l', '--language', 'language', default = 'basic', type=str, help="Languages to be included. Options: basic, standard, full, a language string (e.g. 'it') or a list of languages comma separated (e.g.: 'en-US,en-GB,it'). Default: basic")
@click.option('-o/-O', '--offline-help/--no-offline-help', 'offline', default = False, help="Whether to include the offline help for the chosen languages. Default: no offline help")
@click.option('-p/-P', '--portable/--no-portable', 'portable', default = False, help="Whether to create a portable version of the AppImage. Default: not portable")
@click.option('-r', '--repo-path', 'repo_path', default = '/srv/http/appimage.sys42.eu', type=str, help="Path to the final storage of the AppImage. Default: /srv/http/appimage.sys42.eu")
@click.option('-s/-S', '--sign/--no-sign', 'sign', default=False, help="Whether to sign the build. Default: no-sign")
@click.option('-u/-U', '--updatable/--no-updatable', 'updatable', default = True, help="Create an updatable version of the AppImage or not. Default: updatable")
@click.argument('query')
def build(arch, language, offline, portable, updatable, download_path, repo_path, check, sign, query):
# Parsing options
arches = []
if arch.lower() == 'all':
# We need to build it twice.
arches = [ u'x86', u'x86_64' ]
else:
arches = [ arch.lower() ]
if query.endswith('.yml') or query.endswith('.yaml'):
# This is a buildfile. So we have to load the file and pass the build options ourselves.
config = {}
with open(query, 'r') as file:
config = yaml.safe_load(file)
# With the config file, we ignore all the command line options and set
# generic defaults.
for build in config['builds']:
# Loop a run for each build.
obj = loaih.Build(build['query'], arches)
# Configuration phase
obj.language = build['language']
obj.offline_help = build['offline_help']
obj.portable = build['portable']
obj.updatable = True
obj.storage_path = config['data']['repo'] if 'repo' in config['data'] and config['data']['repo'] else '/srv/http/appimage.sys42.eu'
obj.download_path = config['data']['download'] if 'download' in config['data'] and config['data']['download'] else '/var/tmp/downloads'
if 'sign' in config['data'] and config['data']['sign']:
obj.sign = True
# Build phase
obj.calculate()
if 'force' not in config['data'] or not config['data']['force']:
obj.check()
obj.download()
obj.build()
obj.checksums()
obj.publish()
del obj
else:
obj = loaih.Build(query, arches)
# Configuration phase
obj.language = language
obj.offline_help = offline
obj.portable = portable
obj.updatable = updatable
obj.storage_path = repo_path
obj.download_path = download_path
if sign:
obj.sign = True
# Running phase
obj.calculate()
if check:
obj.check()
obj.download()
obj.build()
obj.checksums()
obj.publish()
del obj
if __name__ == '__main__':
build()
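For reference, this is the structure the script expects from yaml.safe_load() when QUERY is a buildfile, sketched as the equivalent Python dictionary (keys taken from the parsing code above, values from the sample configuration files in this diff):

config = {
    'data': {
        'repo': '/srv/http/appimage.sys42.eu',
        'download': '/var/tmp/downloads',
        'force': False,
        'sign': True,
    },
    'builds': [
        { 'query': 'fresh', 'language': 'basic', 'offline_help': False, 'portable': False },
    ],
}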

scripts/loaih-getversion (new file, 34 lines)

@@ -0,0 +1,34 @@
#!/usr/bin/env python
# encoding: utf-8
import click
from loaih.versions import BuildVersion
import re, sys, json
@click.command()
@click.option('-o', '--output', default = 'rundeck', type=click.Choice(['rundeck', 'json', 'text' ], case_sensitive=False), help="Output format, defaulting to Rundeck Key/Value data format. Options: rundeck,json,text")
@click.argument('query')
def getversion(query, output):
b = BuildVersion(query)
if output.lower() == 'rundeck':
print("""RUNDECK:DATA: query = {query}
RUNDECK:DATA: version = {version}
RUNDECK:DATA: x86 = {x86_url}
RUNDECK:DATA: x86_64 = {x86_64_url}""".format(query = query, version = b.version, x86_url = b.basedirurl['x86'], x86_64_url = b.basedirurl['x86_64']))
elif output.lower() == 'json':
output = {
'query': query,
'version': b.version,
'basedirurl': b.basedirurl
}
print(json.dumps(output))
else:
print("""query: {query}
version: {version}
x86: {x86_url}
x86_64: {x86_64_url}""".format(query = query, version = b.version, x86_url = b.basedirurl['x86'], x86_64_url = b.basedirurl['x86_64']))
if __name__ == '__main__':
getversion()

setup.py (new file, 18 lines)

@@ -0,0 +1,18 @@
#!/usr/bin/env python
# encoding: utf-8
# vim:sts=4:sw=4
from setuptools import setup,find_packages
setup(
name="loaih",
version="1.1.0",
description="LOAIH - LibreOffice AppImage Helpers, help build a LibreOffice AppImage",
author="Emiliano Vavassori",
author_email="syntaxerrormmm@libreoffice.org",
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
scripts=[ 'scripts/loaih-getversion', 'scripts/loaih-build' ],
install_requires=[ 'click', ],
license='MIT',
url='https://git.libreitalia.org/LibreItalia/loappimage-helpers/',
)


@@ -1,261 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
"""machinery for compiling new versions of appimages."""
import datetime
import json
import re
import requests
import subprocess
import shlex
from lxml import html
# Constants
DOWNLOADPAGE = "https://www.libreoffice.org/download/download/"
ARCHIVE = "https://downloadarchive.documentfoundation.org/libreoffice/old/"
RELEASE = "https://download.documentfoundation.org/libreoffice/stable/"
DAILY = "https://dev-builds.libreoffice.org/daily/master/"
PRERELEASE = "https://dev-builds.libreoffice.org/pre-releases/deb/x86_64/"
SELECTORS = {
'still': {
'URL': DOWNLOADPAGE,
'xpath': '(//span[@class="dl_version_number"])[last()]/text()'
},
'fresh': {
'URL': DOWNLOADPAGE,
'xpath': '(//span[@class="dl_version_number"])[1]/text()'
},
'prerelease': {
'URL': DOWNLOADPAGE,
'xpath': '//p[@class="lead_libre"][last()]/following-sibling::ul[last()]/li/a/text()'
},
'daily': {
'URL': DAILY,
'xpath': '//td/a'
}
}
# Generic functions
def match_xpath(url: str, xpath: str):
"""Uses a couple of extensions to get results over webpage."""
resource = requests.get(url, timeout=10)
parsed = html.fromstring(resource.content)
return parsed.xpath(xpath)
# Classes
class Version():
"""Represent the skeleton of each queried version."""
def __init__(self):
self.query = ''
self.branch = ''
self.version = ''
self.urls = {
'x86': '-',
'x86_64': '-'
}
def appname(self):
"""Determines the app name based on the query branch determined."""
datematch = re.match(r'[0-9]{8}', self.query)
retval = 'LibreOffice'
if self.query in {'prerelease', 'daily', 'current', 'yesterday'} or datematch:
retval = 'LibreOfficeDev'
return retval
def cleanup_downloads(self, path, verbose=False) -> None:
"""Cleanups the downloads folder to assure new versions are built."""
search_name = self.appname() + '_' + self.version
cmd = f"find {path} -iname {search_name}\\*.tar.gz -delete"
if verbose:
subprocess.run(shlex.split(cmd))
else:
subprocess.run(shlex.split(cmd), stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
def to_dict(self):
"""Returns a dictionary of versions."""
return {
'query': self.query,
'version': self.version,
'basedirurl': self.urls
}
def to_json(self):
"""Returns a json representation of the version."""
return json.dumps(self.to_dict())
def __str__(self):
return f"""query: {self.query}
version: {self.version}
x86: {self.urls['x86']}
x86_64: {self.urls['x86_64']}"""
class QueryError(Exception):
"""Standard exception for errors regarding queries."""
class Solver():
"""Generic solver to call others."""
def __init__(self, text: str, default_to_current = False):
self.text = text
self.branch = text
self.version = None
self.default_to_current = default_to_current
self.baseurl = ARCHIVE
def solve(self):
"""Splits the query text possibilities, calling all the rest of the solvers."""
solver = self
if self.text in { 'current', 'yesterday', 'daily' }:
solver = DailySolver(self.text, self.default_to_current)
elif self.text in { 'still', 'fresh', 'prerelease' }:
solver = NamedSolver(self.text)
elif '.' in self.text:
solver = NumberedSolver(self.text)
else:
try:
int(self.text)
solver = DailySolver(self.text, self.default_to_current)
except ValueError:
raise QueryError("The queried version does not exist.")
self.version = solver.solve()
self.baseurl = solver.baseurl
return self.version
def to_version(self):
retval = Version()
retval.query = self.text
retval.branch = self.branch
retval.version = self.version
if retval.branch != 'daily' and retval.branch != 'prerelease':
retval.urls['x86_64'] = self.baseurl + 'x86_64/'
try:
x86ver = match_xpath(self.baseurl + 'x86/', '//td/a/text()')
except Exception:
return retval
if len(x86ver) > 1:
retval.urls['x86'] = self.baseurl + 'x86/'
else:
retval.urls['x86_64'] = self.baseurl
return retval
@staticmethod
def parse(text: str, default_to_current = False):
"""Calling the same as solver class."""
retval = Solver(text, default_to_current)
retval.solve()
return retval.to_version()
class DailySolver(Solver):
"""Specific solver to daily queries."""
def __init__(self, text: str, default_to_current = False):
super().__init__(text, default_to_current)
self.branch = 'daily'
self.baseurl = DAILY
def solve(self):
"""Get daily urls based on query."""
x = "//td/a[starts-with(text(),'Linux-rpm_deb-x86') and contains(text(),'TDF/')]/text()"
tinderbox_segment = match_xpath(self.baseurl, x)[-1]
self.baseurl = self.baseurl + tinderbox_segment
# Reiterate now to search for the dated version
xpath_query = "//td/a/text()"
daily_set = match_xpath(self.baseurl, xpath_query)
matching = ''
today = datetime.datetime.today()
try:
int(self.text)
matching = datetime.datetime.strptime(self.text, "%Y%m%d").strftime('%Y-%m-%d')
except ValueError:
# All textual version
if self.text in { 'current', 'daily' }:
matching = 'current'
elif self.text == 'yesterday':
matching = (today + datetime.timedelta(days=-1)).strftime("%Y-%m-%d")
results = sorted([ x for x in daily_set if matching in x ])
if len(results) == 0:
# No daily versions found.
if self.default_to_current:
solver = DailySolver('current')
self.version = solver.solve()
self.baseurl = solver.baseurl
else:
self.baseurl = self.baseurl + results[-1]
# baseurl for x86 is not available for sure on daily builds.
xpath_string = "//td/a[contains(text(), '_deb.tar.gz')]/text()"
links = match_xpath(self.baseurl, xpath_string)
if len(links) > 0:
link = str(links[-1])
self.version = link.rsplit('/', maxsplit=1)[-1].split('_')[1]
return self.version
class NamedSolver(Solver):
"""Solves the query knowing that the input is a named query."""
def __init__(self, text: str):
super().__init__(text)
self.branch = text
self.baseurl = SELECTORS[self.text]['URL']
self.generalver = ''
def solve(self):
"""Get versions from query."""
xpath_query = SELECTORS[self.text]['xpath']
results = sorted(match_xpath(self.baseurl, xpath_query))
if len(results) > 0:
self.generalver = str(results[-1])
result: str = self.generalver
xpath_string = f"//td/a[starts-with(text(),'{result}')]/text()"
archived_versions = sorted(match_xpath(ARCHIVE, xpath_string))
if len(archived_versions) == 0:
return self.version
# Return just the last version
fullversion: str = str(archived_versions[-1])
self.baseurl = ARCHIVE + fullversion + 'deb/'
self.version = fullversion.rstrip('/')
if self.branch == 'prerelease':
self.baseurl = PRERELEASE
return self.version
class NumberedSolver(Solver):
"""Specific solver for numbered versions."""
def __init__(self, text: str):
super().__init__(text)
self.branch = '.'.join(text.split('.')[0:2])
def solve(self):
xpath_string = f"//td/a[starts-with(text(),'{self.text}')]/text()"
versions = sorted(match_xpath(self.baseurl, xpath_string))
if len(versions) == 0:
# It is possible that in the ARCHIVE there's no such version (might be a prerelease)
return self.version
version = str(versions[-1])
self.baseurl = self.baseurl + version + 'deb/'
self.version = version.rstrip('/')
return self.version
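A minimal sketch of how these solvers are driven, assuming the module is importable as loaih, as the command-line code later in this diff does (the printed values are illustrative):

import loaih

ver = loaih.Solver.parse('fresh')   # also accepts 'still', 'daily', 'yesterday', a date such as '20230115', or a number such as '7.4'
print(ver.version)                  # e.g. '7.4.5.1' (hypothetical)
print(ver.urls['x86_64'])           # base URL of the .deb tarballs for that version
print(ver.to_json())                # same information as a JSON document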


@@ -1,789 +0,0 @@
#!/usr/bin/env python3
# encoding: utf-8
"""Classes and functions to build an AppImage."""
import os
import datetime
import glob
import subprocess
import shutil
import re
import shlex
import tempfile
import hashlib
import requests
import magic
import loaih
class Collection(list):
"""Aggregates metadata on a collection of builds."""
def __init__(self, query, arch = ['x86', 'x86_64']):
"""Build a list of version to check/build for this round."""
super().__init__()
version = loaih.Solver.parse(query)
# If a version is not buildable, discard it now!
arch = [ x for x in arch if version.urls[x] != '-' ]
self.extend([ Build(version, ar) for ar in arch ])
class BuildException(Exception): pass
class Build():
"""Builds a single version."""
LANGSTD = [ 'ar', 'de', 'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt',
'pt-BR', 'ru', 'zh-CN', 'zh-TW' ]
LANGBASIC = [ 'en-GB' ]
ARCHSTD = [ 'x86', 'x86_64' ]
def __init__(self, version: loaih.Version, arch, debug=False):
self.debug = debug
self.version = version
self.tidy_folder = True
self.verbose = True
self.check_dependencies = False
self.arch = arch
self.short_version = str.join('.', self.version.version.split('.')[0:2])
self.branch_version = self.version.branch
self.url = self.version.urls[arch]
# Other default values - for structured builds
# Most likely will be overridden by cli
self.language = 'basic'
self.offline_help = False
self.portable = False
self.updatable = True
self.sign = True
self.repo_type = 'local'
self.remote_host = ''
self.remote_path = ''
self.storage_path = '/mnt/appimage'
self.download_path = '/var/tmp/downloads'
self.appnamedir = ''
# Specific build version
self.appname = self.version.appname()
self.appversion = ''
self.appimagedir = ''
self.appimagefilename = ''
self.zsyncfilename = ''
# Other variables by build
self.languagepart = '.' + self.language
self.helppart = ''
# Creating a tempfile
self.builddir = tempfile.mkdtemp()
self.tarballs = {}
self.found = False
self.built = False
# Preparing the default for the relative path on the storage for
# different versions.
# The path will be evaluated as part of the check() function, as it is
# understood that the storage_path can be changed before that phase.
self.relative_path = []
self.full_path = ''
self.baseurl = ''
def calculate(self):
"""Calculate exclusions and other variables."""
if self.verbose:
print("--- Preliminary Phase ---")
if self.debug and isinstance(shutil.which('apt'), str):
# APT is found in path. We assume we can find dependencies.
self.check_dependencies = True
if self.verbose:
print("Updating system packages cache.")
# Updating package cache
subprocess.run(['sudo', 'apt', 'update'], check=True, stdout=subprocess.DEVNULL)
if self.verbose:
print("Ensuring apt-file is installed and updated.")
# Updating apt-file cache
subprocess.run(['sudo', 'apt', 'install', 'apt-file', '-y'], check=True, stdout=subprocess.DEVNULL)
subprocess.run(['sudo', 'apt-file', 'update'], check=True, stdout=subprocess.DEVNULL)
else:
print("CAUTION: your system seems not to include a working version of apt.\nThis will cause the AppImage to leverage system libraries when run.")
self.check_dependencies = False
if self.verbose:
print("--- Calculate Phase ---")
# let's check here if we are on a remote repo or local.
if self.storage_path.startswith("http"):
# Final repository is remote
self.repo_type = 'remote'
if self.verbose:
print("Repo is remote.")
else:
self.repo_type = 'local'
if self.verbose:
print("Repo is local.")
# Calculating languagepart
self.languagepart = "."
if ',' in self.language:
self.languagepart += self.language.replace(',', '-')
else:
self.languagepart += self.language
# Calculating help part
if self.offline_help:
self.helppart = '.help'
# Building the required names
self.appimagefilename = self.__gen_appimagefilename__()
self.zsyncfilename = self.appimagefilename + '.zsync'
# Mandate to the private function to calculate the full_path available
# for the storage and the checks.
self.__calculate_full_path__()
def check(self):
"""Checking if the requested AppImage has been already built."""
if self.branch_version == 'daily':
# Daily versions have to be downloaded and built each time; no
# matter if another one is already present.
return
if self.verbose:
print("--- Check Phase ---")
if len(self.appimagefilename) == 0:
self.calculate()
if self.verbose:
print(f"Searching for {self.appimagefilename}")
# First, check if by metadata the repo is remote or not.
if self.repo_type == 'remote':
# Remote storage. I have to query a remote site to know if it
# was already built.
name = self.appimagefilename
url = self.storage_path.rstrip('/') + self.full_path + '/'
try:
if len(loaih.match_xpath(url, f"//a[contains(@href,'{name}')]/@href")) > 0:
# Already built.
self.found = True
except Exception:
# The specified URL does not exist, so the AppImage needs to be built.
self.found = False
else:
# Repo is local
command = f"find {self.full_path} -name {self.appimagefilename}"
res = subprocess.run(shlex.split(command),
capture_output=True,
env={ "LC_ALL": "C" },
text=True, encoding='utf-8', check=True)
if res.stdout and len(res.stdout.strip("\n")) > 0:
# All good, the command was executed fine.
self.found = True
if self.found:
if self.verbose:
print(f"Found requested AppImage: {self.appimagefilename}.")
def download(self, compact=False):
"""Downloads the contents of the URL as it was a folder."""
if self.verbose:
print("--- Download Phase ---")
if self.found:
return
if self.verbose:
print(f"Started downloads for {self.version.version}. Please wait.")
# Checking if a valid path has been provided
if self.url == '-':
if self.verbose:
print(f"Cannot build for arch {self.arch}. Continuing with other arches.")
# Mark it as already found, so the remaining phases are skipped.
self.found = True
return
# Identifying downloads
self.tarballs = [ x for x in loaih.match_xpath(self.url, "//td/a/text()") if x.endswith('tar.gz') and 'deb' in x and self.version.version in x ]
self.download_tarballs = []
# Issue #5: manage a limited number of downloads and not the full set.
if compact:
self.download_tarballs = self.__select_tarballs__()
else:
self.download_tarballs = self.tarballs
# Create and change directory to the download location
os.makedirs(self.download_path, exist_ok = True)
os.chdir(self.download_path)
for archive in self.download_tarballs:
# If the archive is already there, do not do anything.
if os.path.exists(archive):
continue
# Download the archive
try:
self.__download_archive_debug__(archive)
except Exception as error:
print(f"Failed to download {archive}: {error}.")
if self.verbose:
print(f"Finished downloads for {self.version.version}.")
def build(self):
"""Building all the versions."""
if self.found:
return
if self.verbose:
print("--- Building Phase ---")
# Preparation tasks
self.appnamedir = os.path.join(self.builddir, self.appname)
os.makedirs(self.appnamedir, exist_ok=True)
# And then cd to the appname folder.
os.chdir(self.appnamedir)
# Download appimagetool from github
appimagetoolurl = r"https://github.com/AppImage/AppImageKit/releases/"
appimagetoolurl += f"download/continuous/appimagetool-{self.arch}.AppImage"
self.__download__(appimagetoolurl, 'appimagetool')
os.chmod('appimagetool', 0o755)
# Build the requested version.
self.__unpackbuild__()
self.__prepare_contents__()
if self.check_dependencies:
if self.verbose:
print("Searching for dependent libraries, it might take a while.")
self.__missing_dependencies__()
self.__finalize_build__()
def checksums(self):
"""Create checksums of the built versions."""
# Skip checksums if the build was initially already found in the storage directory
if self.verbose:
print("--- Checksum Phase ---")
if self.found:
return
os.chdir(self.appnamedir)
if self.built:
for item in [ self.appimagefilename, self.zsyncfilename ]:
itempath = os.path.join(self.appnamedir, item)
if os.path.exists(itempath):
self.__create_checksum__(item)
def publish(self):
"""Moves built versions to definitive storage."""
if self.verbose:
print("--- Publish Phase ---")
if self.found:
# All files are already present in the full_path
return
os.chdir(self.appnamedir)
# Two cases here: local and remote storage_path.
if self.repo_type == 'remote':
# Remote first.
# Build destination directory
remotepath = self.remote_path.rstrip('/') + self.full_path
try:
if self.verbose:
subprocess.run(
r"rsync -rlIvz --munge-links *.AppImage* " +
f"{self.remote_host}:{remotepath}",
cwd=self.appnamedir, shell=True, check=True
)
else:
subprocess.run(
r"rsync -rlIvz --munge-links *.AppImage* " +
f"{self.remote_host}:{remotepath}",
cwd=self.appnamedir, shell=True, check=True,
stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
)
finally:
pass
else:
# Local
# Forcing creation of subfolders, in case there is a new build
os.makedirs(self.full_path, exist_ok = True)
for file in glob.glob("*.AppImage*"):
subprocess.run(shlex.split(
f"cp -f {file} {self.full_path}"
), check=True)
def generalize_and_link(self, chdir = 'default'):
"""Creates the needed generalized files if needed."""
if self.verbose:
print("--- Generalize and Link Phase ---")
# If called with a pointed version, no generalize and link necessary.
if not self.branch_version:
return
# If a prerelease or a daily version, either.
if self.version.query in { 'daily', 'prerelease' }:
return
if chdir == 'default':
chdir = self.full_path
appimagefilename = r''
zsyncfilename = r''
# Creating versions for short version and query text
versions = [ self.short_version, self.branch_version ]
os.chdir(chdir)
# if the appimage for the reported arch is not found, skip to next
# arch
if not os.path.exists(self.appimagefilename):
return
# Doing it both for short_name and for branchname
for version in versions:
appimagefilename = f"{self.appname}-{version}"
appimagefilename += f"{self.languagepart}{self.helppart}"
appimagefilename += f'-{self.arch}.AppImage'
zsyncfilename = appimagefilename + '.zsync'
# Create the symlink
if self.verbose:
print(f"Creating {appimagefilename} and checksums.")
if os.path.exists(appimagefilename):
os.unlink(appimagefilename)
os.symlink(self.appimagefilename, appimagefilename)
# Create the checksum for the AppImage
self.__create_checksum__(appimagefilename)
# Do not continue if no zsync file is provided.
if not self.updatable:
continue
if self.verbose:
print(f"Creating zsync file for version {version}.")
if os.path.exists(zsyncfilename):
os.unlink(zsyncfilename)
shutil.copyfile(self.zsyncfilename, zsyncfilename)
# Editing the zsyncfile
subprocess.run(shlex.split(
r"sed --in-place 's/^Filename:.*$/Filename: " +
f"{appimagefilename}/' {zsyncfilename}"
), check=True)
self.__create_checksum__(zsyncfilename)
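# Illustrative example (not part of the original code): for a 'fresh' build of
# version 7.4.5.1 (hypothetical), language 'it', arch x86_64, the loop above links
#   LibreOffice-7.4.it-x86_64.AppImage   -> LibreOffice-7.4.5.1.it-x86_64.AppImage
#   LibreOffice-fresh.it-x86_64.AppImage -> LibreOffice-7.4.5.1.it-x86_64.AppImage
# and regenerates the matching .md5 and, if updatable, .zsync files.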
### Private methods ###
def __gen_appimagefilename__(self):
"""Generalize the construction of the name of the app."""
self.appversion = self.version.version + self.languagepart + self.helppart
return self.appname + f'-{self.appversion}-{self.arch}.AppImage'
def __calculate_full_path__(self):
"""Calculate relative path of the build, based on internal other variables."""
if len(self.relative_path) == 0:
if self.tidy_folder:
if self.branch_version == 'daily':
self.relative_path.append('daily')
elif self.branch_version == 'prerelease':
self.relative_path.append('prerelease')
# Not the same check, an additional one
if self.portable:
self.relative_path.append('portable')
# Fullpath might be intended two ways:
if self.repo_type == 'remote':
# Repository is remote
# we build full_path as it is absolute to the root of the
# storage_path.
self.full_path = '/'
if len(self.relative_path) >= 1:
self.full_path += str.join('/', self.relative_path)
else:
# Repository is local
# If it is remote or if it is local
fullpath_arr = self.storage_path.split('/')
# Joining relative path only if it is not null
if len(self.relative_path) > 0:
fullpath_arr.extend(self.relative_path)
self.full_path = re.sub(r"/+", '/', str.join('/', fullpath_arr))
if not os.path.exists(self.full_path):
os.makedirs(self.full_path, exist_ok = True)
def __create_checksum__(self, file):
"""Internal function to create checksum file."""
retval = hashlib.md5()
with open(file, 'rb') as rawfile:
while True:
buf = rawfile.read(2**20)
if not buf:
break
retval.update(buf)
with open(f"{file}.md5", 'w', encoding='utf-8') as checkfile:
checkfile.write(f"{retval.hexdigest()} {os.path.basename(file)}")
def __download_archive__(self, archive) -> str:
return self.__download__(self.url, archive)
def __download_archive_debug__(self, archive) -> str:
"""Analyses the downloaded archive to prevent issues with unpacking."""
break_control = 0
testedfilename = ""
while break_control <= 5:
timenow = datetime.datetime.now()
testedfilename, resource = self.__download_debug__(self.url, archive)
mime = magic.Magic(mime=True)
mimetype = mime.from_file(testedfilename)
if mimetype == 'application/gzip':
return testedfilename
# On the contrary, we will dump a logfile, remove the download and
# redo the download.
with open(os.path.join(self.download_path, 'downloadfailure.log'), 'a') as logfile:
logfile.write(f"{timenow.isoformat()};{resource.url};{mimetype}\n")
os.unlink(testedfilename)
break_control += 1
# If it arrives here, 5 attempts to download the archive have failed.
raise BuildException(f"All downloads failed for {archive}. Exiting.")
def __download__(self, url: str, filename: str):
basename = filename
if '/' in filename:
basename = filename.split('/')[-1]
full_url = url
if url.endswith('/'):
# URL has to be completed with basename of filename
full_url = url + basename
with requests.get(full_url, stream=True, timeout=10) as resource:
resource.raise_for_status()
with open(filename, 'wb') as file:
for chunk in resource.iter_content(chunk_size=8192):
file.write(chunk)
return filename
def __download_debug__(self, url: str, filename: str) -> tuple[str, requests.Response]:
basename = filename
if '/' in filename:
basename = filename.split('/')[-1]
full_url = url
if url.endswith('/'):
# URL has to be completed with basename of filename
full_url = url + basename
with requests.get(full_url, stream=True, timeout=10) as resource:
resource.raise_for_status()
with open(filename, 'wb') as file:
for chunk in resource.iter_content(chunk_size=8192):
file.write(chunk)
return filename, resource
def __select_tarballs__(self):
retval = [ self.tarballs[0] ]
# Let's process standard languages and append results to the
# buildtarball
if self.language == 'basic':
if self.offline_help:
retval.extend([ x for x in self.tarballs if 'pack_en-GB' in x ])
else:
retval.extend([ x for x in self.tarballs if 'langpack_en-GB' in x])
elif self.language == 'standard':
for lang in Build.LANGSTD:
if self.offline_help:
retval.extend([ x for x in self.tarballs if 'pack_' + lang in x ])
else:
retval.extend([ x for x in self.tarballs if 'langpack_' + lang in x ])
elif self.language == 'full':
if self.offline_help:
# We need also all help. Let's replace buildtarball with the
# whole bunch
retval = self.tarballs
else:
retval.extend([ x for x in self.tarballs if 'langpack' in x ])
else:
# Looping for each language in self.language
for lang in self.language.split(","):
if self.offline_help:
retval.extend([ x for x in self.tarballs
if 'pack_' + lang in x ])
else:
retval.extend([ x for x in self.tarballs
if 'langpack_' + lang in x ])
return retval
def __unpackbuild__(self):
# We start by filtering out tarballs from the list
buildtarballs = self.__select_tarballs__()
os.chdir(self.appnamedir)
# Unpacking the tarballs
if self.verbose:
print("---- Unpacking ----")
for archive in buildtarballs:
subprocess.run(shlex.split(
f"tar xzf {self.download_path}/{archive}"), check=True)
def __prepare_contents__(self):
# create appimagedir
if self.verbose:
print("---- Preparing the build ----")
self.appimagedir = os.path.join(self.appnamedir, self.appname + '.AppDir')
os.makedirs(self.appimagedir, exist_ok = True)
# At this point, let's decompress the deb packages
if self.verbose:
print("Unpacking main archives")
subprocess.run(shlex.split(
r"find .. -iname '*.deb' -exec dpkg -x {} . \;"
), cwd=self.appimagedir, check=True)
if self.portable:
subprocess.run(shlex.split(
r"find . -type f -iname 'bootstraprc' " +
r"-exec sed -i 's|^UserInstallation=.*|" +
r"UserInstallation=\$SYSUSERCONFIG/libreoffice/%s|g' {} \+" % self.short_version
), cwd=self.appimagedir, check=True)
# Changing desktop file
if self.verbose:
print("Preparing .desktop file.")
subprocess.run(shlex.split(
r"find . -iname startcenter.desktop -exec cp {} . \;"
), cwd=self.appimagedir, check=True)
subprocess.run(shlex.split(
f"sed --in-place \'s:^Name=.*$:Name={self.appname}:\' " +
r"startcenter.desktop"
), cwd=self.appimagedir, check=False)
if self.verbose:
print("Preparing icon file.")
subprocess.run(shlex.split(
r"find . -name '*startcenter.png' -path '*hicolor*48x48*' " +
r"-exec cp {} . \;"
), cwd=self.appimagedir, check=True)
# Finding path to main executable
cmd = subprocess.run(shlex.split(
r"find -iname soffice.bin -print"
), cwd=self.appimagedir, check = True, capture_output=True)
self.main_executable = os.path.abspath(os.path.join(
self.appimagedir,
cmd.stdout.strip().decode('utf-8')))
# Find the name of the binary called in the desktop file.
binaryname = ''
with open(
os.path.join(self.appimagedir, 'startcenter.desktop'),
'r', encoding="utf-8"
) as desktopfile:
for line in desktopfile.readlines():
if re.match(r'^Exec', line):
binaryname = line.split('=')[-1].split(' ')[0]
# Exit at the first match
break
#binary_exec = subprocess.run(shlex.split(r"awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'"), cwd=self.appimagedir, text=True, encoding='utf-8')
#binaryname = binary_exec.stdout.strip("\n")
# Creating a soft link so the executable in the desktop file is present
bindir=os.path.join(self.appimagedir, 'usr', 'bin')
os.makedirs(bindir, exist_ok = True)
subprocess.run(shlex.split(
r"find ../../opt -iname soffice.bin -path '*program*' " +
r"-exec ln -sf {} ./%s \;" % binaryname
), cwd=bindir, check=True)
def __missing_dependencies__(self):
"""Finds and copy in the appimagedir any missing libraries."""
# If the system permits it, we leverage lddcollect
# to find the packages that contain .so dependencies in the main build.
import lddcollect
# We first process the ELF
raw = lddcollect.process_elf(self.main_executable, verbose = False, dpkg = True)
# If all works as expected, we obtain a tuple of:
# (debian_packages, all_libraries, files_not_found)
debian_packages = raw[0]
not_found = raw[2]
if len(debian_packages) != 0:
# Creating temporary folders
debs = [ x.split(':')[0] for x in debian_packages ]
downloadpath = os.path.abspath(os.path.join(self.builddir, 'dependencies'))
os.makedirs(downloadpath)
if self.verbose:
print("Downloading missing dependencies, please wait.")
# Let's try to find and install also other libraries
additional = list(dict.fromkeys([ Helpers.lib_to_deb(x) for x in not_found ]))
debs.extend(additional)
# It seems the download command does not download dependencies of
# the packages.
if self.verbose:
print("Constructing the dependency tree.")
for deb in debian_packages:
debs.extend(Helpers.deb_dependencies(deb))
# Re-cleaning up the dependency tree
debs = list(dict.fromkeys(debs))
# We download the missing dependencies leveraging apt
subprocess.run(shlex.split(
r"apt download " + " ".join(debs)
), cwd=downloadpath, check=True)
# then we install them inside a temporary path
temporary = os.path.abspath(os.path.join(downloadpath, 'temp'))
os.makedirs(temporary)
subprocess.run(shlex.split(
r"find " + downloadpath + r" -iname \*.deb -exec dpkg -x {} " + temporary + r" \;"
), cwd=self.builddir, check=True)
# We are finally copying the .so files in the same path as main_executable
libdirs = [ 'lib/x86_64-linux-gnu', 'usr/lib/x86_64-linux-gnu' ]
for libdir in libdirs:
fulllibdir = os.path.abspath(os.path.join(temporary, libdir))
subprocess.run(shlex.split(
f"cp -Ra {fulllibdir}/. {os.path.dirname(self.main_executable)}/"
), cwd=temporary, check=True)
if self.debug:
with open(os.path.abspath(os.path.join(self.storage_path, 'dependencies.lst')), 'w', encoding="utf-8") as deplist:
deplist.write("\n".join(debs))
def __finalize_build__(self):
if self.verbose:
print("Finalizing build...")
# Cleaning up AppDir
cleanup_dirs = [ 'etc', 'lib', 'lib64', 'usr/lib', 'usr/local' ]
for local in cleanup_dirs:
shutil.rmtree(os.path.abspath(os.path.join(self.appimagedir, local)), ignore_errors=True)
# Download AppRun from github
apprunurl = r"https://github.com/AppImage/AppImageKit/releases/"
apprunurl += f"download/continuous/AppRun-{self.arch}"
dest = os.path.join(self.appimagedir, 'AppRun')
self.__download__(apprunurl, dest)
os.chmod(dest, 0o755)
# Dealing with extra options
buildopts = []
if self.sign:
buildopts.append('--sign')
# adding zsync build if updatable
if self.updatable:
buildopts.append(f"-u 'zsync|{self.zsyncfilename}'")
buildopts_str = str.join(' ', buildopts)
# Build the number-specific build
if self.verbose:
print("---- Start building ----")
subprocess.run(shlex.split(
f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
f"{self.appimagedir}"
), env={ "VERSION": self.appversion }, check=True)
print("---- End building ----")
else:
subprocess.run(shlex.split(
f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
f"{self.appimagedir}"
), env={ "VERSION": self.appversion }, stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL, check=True)
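# The resulting call is roughly (values illustrative):
#   VERSION=24.2.1 ./appimagetool --sign -u 'zsync|LibreOffice.AppImage.zsync' -v LibreOffice.AppDir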
if self.verbose:
print(f"Built AppImage version {self.appversion}")
# Cleanup phase, before new run.
for deb in glob.glob(self.appnamedir + '/*.deb'):
os.remove(deb)
subprocess.run(shlex.split(
r"find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+"
), check=True)
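# This removes every leftover first-level directory so the next run starts clean.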
self.built = True
def __del__(self):
"""Destructor"""
if not self.debug:
# Cleaning up build directory
shutil.rmtree(self.builddir)
class Helpers:
@staticmethod
def deb_dependencies(package_name):
"""Returns the array of the dependencies of that package."""
# First pass: list the dependencies of the package in raw output
pass1 = subprocess.Popen(shlex.split(
f"apt-cache depends --recurse --no-recommends --no-suggests --no-conflicts --no-breaks --no-replaces --no-enhances --no-pre-depends {package_name}"
), stdout=subprocess.PIPE)
# Second pass: keep only the lines that carry a package name.
pass2 = subprocess.Popen(shlex.split(
r"grep '^\w'"
), stdin=pass1.stdout, stdout=subprocess.PIPE, encoding='utf-8')
stdout = pass2.communicate()[0]
return stdout.strip().split("\n")
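# Illustrative example (assumed output): Helpers.deb_dependencies('libxml2')
# returns a flat list such as ['libc6', 'libicu72', 'zlib1g', ...].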
@staticmethod
def lib_to_deb(libraryname):
"""Uses system tools to identify the missing package."""
libsearch = subprocess.run(shlex.split(
f"sudo apt-file find -lx {libraryname}$"
), check=True, capture_output=True)
candidate = [ x for x in libsearch.stdout.decode('utf-8').split('\n') if 'lib' in x ][0]
return candidate
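# Illustrative example (assumed output): Helpers.lib_to_deb('libxslt.so.1')
# would typically return a package name such as 'libxslt1.1'.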

View File

@ -1,221 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
"""Helps with command line commands."""
import os
import shutil
import sys
import json
import click
import yaml
import loaih
import loaih.version
import loaih.build
@click.group()
@click.version_option(loaih.version.version)
def cli():
"""Helps with command line commands."""
@cli.command()
@click.option('-j', '--json', 'jsonout', default=False, is_flag=True, help="Output format in json.")
@click.option('--default-to-current', '-d', is_flag=True, default=False, help="If no versions are found, default to current one (for daily builds). Default: do not default to current.")
@click.argument('query')
def getversion(query, jsonout, default_to_current):
"""Get download information for named or numbered versions."""
batchlist = []
queries = []
if ',' in query:
queries.extend(query.split(','))
else:
queries.append(query)
for singlequery in queries:
elem = loaih.Solver.parse(singlequery, default_to_current)
if elem.version not in { None, "" }:
batchlist.append(elem)
if len(batchlist) > 0:
if jsonout:
click.echo(json.dumps([x.to_dict() for x in batchlist ]))
else:
for value in batchlist:
click.echo(value)
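# Illustrative usage: loaih getversion -j still,daily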
@cli.command()
@click.option('-a', '--arch', 'arch', default='x86_64',
type=click.Choice(['x86', 'x86_64', 'all'], case_sensitive=False), help="Build the AppImage for a specific architecture. Default: x86_64")
@click.option('--check', '-c', is_flag=True, default=False, help="Checks in the repository path if the queried version is existent. Default: do not check")
@click.option('--checksums', '-e', is_flag=True, default=False, help="Create checksums for each created file (AppImage). Default: do not create checksums.")
@click.option('--keep-downloads', '-k', 'keep', is_flag=True, default=False, help="Keep the downloads folder after building the AppImage. Default: do not keep.")
@click.option('--languages', '-l', 'language', default='basic', type=str, help="Languages to be included. Options: basic, standard, full, a language string (e.g. 'it') or a list of languages comma separated (e.g.: 'en-US,en-GB,it'). Default: basic")
@click.option('--offline-help', '-o', 'offline', is_flag=True, default=False, help="Include the offline help pages for the chosen languages. Default: no offline help")
@click.option('--portable', '-p', 'portable', is_flag=True, default=False, help="Create a portable version of the AppImage or not. Default: no portable")
@click.option('--sign', '-s', is_flag=True, default=False, help="Sign the build with your default GPG key. Default: do not sign")
@click.option('--updatable', '-u', is_flag=True, default=False, help="Create an updatable AppImage (compatible with zsync2). Default: not updatable")
@click.option('--download-path', '-d', default='./downloads', type=str, help="Path to the download folder. Default: ./downloads")
@click.option('--repo-path', '-r', default='.', type=str, help="Path to the final storage of the AppImage. Default: current directory")
@click.option('--debug', 'debug', is_flag=True, default=False, help="Activate debug options.")
@click.argument('query')
def build(arch, language, offline, portable, updatable, download_path, repo_path, check, checksums, sign, keep, query, debug):
"""Builds an Appimage with the provided options."""
# Multiple query support
queries = []
if ',' in query:
queries.extend(query.split(','))
else:
queries.append(query)
# Parsing options
arches = []
if arch.lower() == 'all':
# We need to build it twice.
arches = ['x86', 'x86_64']
else:
arches = [arch.lower()]
# Other more global variables
repopath = os.path.abspath(repo_path)
if not os.path.exists(repopath):
os.makedirs(repopath, exist_ok=True)
downloadpath = os.path.abspath(download_path)
if not os.path.exists(downloadpath):
os.makedirs(downloadpath, exist_ok=True)
for myquery in queries:
for appbuild in loaih.build.Collection(myquery, arches):
# Configuration phase
appbuild.debug = debug
appbuild.tidy_folder = False
appbuild.language = language
appbuild.offline_help = offline
appbuild.portable = portable
appbuild.updatable = updatable
appbuild.storage_path = repopath
appbuild.download_path = downloadpath
appbuild.sign = sign
# Running phase
appbuild.calculate()
if check:
appbuild.check()
appbuild.download(compact = True)
appbuild.build()
if checksums:
appbuild.checksums()
appbuild.publish()
del appbuild
if not keep:
shutil.rmtree(downloadpath)
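# Illustrative usage:
#   loaih build -a x86_64 -l standard -o -u -r /srv/http/appimage -d /var/tmp/downloads still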
@cli.command()
@click.option("--verbose", '-v', is_flag=True, default=False, help="Show building phases.", show_default=True)
@click.argument("yamlfile")
def batch(yamlfile, verbose):
"""Builds a collection of AppImages based on YAML file."""
# Defaults for batch building differ from those of a manual build. To
# reflect this, the commands are split between batch (bulk creation)
# and build (manual building).
# Check if yamlfile exists.
if not os.path.exists(os.path.abspath(yamlfile)):
click.echo(f"YAML file {yamlfile} does not exists or is unreadable.")
sys.exit(1)
# This is a buildfile. So we have to load the file and pass the build
# options ourselves.
config = {}
with open(os.path.abspath(yamlfile), 'r', encoding='utf-8') as file:
config = yaml.safe_load(file)
# Globals for yamlfile
gvars = {}
gvars['download_path'] = "/var/tmp/downloads"
if 'download' in config['data'] and config['data']['download']:
gvars['download_path'] = config['data']['download']
gvars['force'] = False
if 'force' in config['data'] and config['data']['force']:
gvars['force'] = config['data']['force']
gvars['storage_path'] = "/srv/http/appimage"
if 'repo' in config['data'] and config['data']['repo']:
gvars['storage_path'] = config['data']['repo']
gvars['remoterepo'] = False
gvars['remote_host'] = ''
gvars['remote_path'] = "/srv/http/appimage"
if 'http' in gvars['storage_path']:
gvars['remoterepo'] = True
gvars['remote_host'] = "ciccio.libreitalia.org"
if 'remote_host' in config['data'] and config['data']['remote_host']:
gvars['remote_host'] = config['data']['remote_host']
if 'remote_path' in config['data'] and config['data']['remote_path']:
gvars['remote_path'] = config['data']['remote_path']
gvars['sign'] = False
if 'sign' in config['data'] and config['data']['sign']:
gvars['sign'] = True
# With a config file, all command line options are ignored and generic
# defaults are set.
for cbuild in config['builds']:
# Loop a run for each build.
collection = loaih.build.Collection(cbuild['query'])
for obj in collection:
# Configuration phase
obj.verbose = verbose
obj.language = 'basic'
if 'language' in cbuild and cbuild['language']:
obj.language = cbuild['language']
obj.offline_help = False
if 'offline_help' in cbuild and cbuild['offline_help']:
obj.offline_help = cbuild['offline_help']
obj.portable = False
if 'portable' in cbuild and cbuild['portable']:
obj.portable = cbuild['portable']
obj.updatable = True
obj.storage_path = gvars['storage_path']
obj.download_path = gvars['download_path']
obj.remoterepo = gvars['remoterepo']
obj.remote_host = gvars['remote_host']
obj.remote_path = gvars['remote_path']
obj.sign = gvars['sign']
# Build phase
obj.calculate()
if not gvars['force']:
obj.check()
obj.download()
obj.build()
obj.checksums()
if obj.remoterepo and obj.appnamedir:
obj.generalize_and_link(obj.appnamedir)
obj.publish()
if not obj.remoterepo:
obj.generalize_and_link()
del obj
# In case prerelease or daily branches are used, clean up the download
# folder after the complete run has finished (to make sure the next run
# will re-download all the needed files and is indeed fresh).
# We sweep all the builds inside each collection to determine which
# files to delete.
for cbuild in config['builds']:
# Loop a run for each build.
for build in loaih.build.Collection(cbuild['query']):
if build.version.branch in {'prerelease', 'daily'}:
build.version.cleanup_downloads(gvars['download_path'], verbose)
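# Illustrative usage: loaih batch -v loaih.yaml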

View File

@ -1,4 +0,0 @@
#!/usr/bin/env python3
# encoding: utf-8
version = "1.4.0rc1"

View File

@ -1,69 +1,67 @@
---
data:
repo: https://appimages.libreitalia.org
remote_host: ciccio
remote_path: /var/lib/nethserver/vhost/appimages
repo: /srv/http/appimage.sys42.eu
download: /var/tmp/downloads
force: false
sign: true
force: no
sign: yes
builds:
- query: still
language: basic
offline_help: false
portable: false
offline_help: no
portable: no
- query: still
language: basic
offline_help: true
portable: false
offline_help: yes
portable: no
- query: still
language: basic
offline_help: false
portable: true
offline_help: no
portable: yes
- query: still
language: basic
offline_help: true
portable: true
offline_help: yes
portable: yes
- query: still
language: standard
offline_help: false
portable: false
offline_help: no
portable: no
- query: still
language: standard
offline_help: true
portable: false
offline_help: yes
portable: no
- query: still
language: standard
offline_help: false
portable: true
offline_help: no
portable: yes
- query: still
language: standard
offline_help: true
portable: true
offline_help: yes
portable: yes
- query: still
language: full
offline_help: false
portable: false
offline_help: no
portable: no
- query: still
language: full
offline_help: true
portable: false
offline_help: yes
portable: no
- query: still
language: full
offline_help: false
portable: true
offline_help: no
portable: yes
- query: still
language: full
offline_help: true
portable: true
offline_help: yes
portable: yes

View File

@ -1,14 +0,0 @@
---
data:
repo: https://appimages.libreitalia.org
remote_host: ciccio
remote_path: /var/lib/nethserver/vhost/appimages
download: /var/tmp/downloads
force: true
sign: true
builds:
- query: 7.2.3
language: basic
offline_help: false
portable: false