Renamed files within the project.

Emiliano Vavassori 2022-04-30 17:46:46 +02:00
parent c30407a244
commit 15369c0895
4 changed files with 550 additions and 550 deletions
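Taken together, the hunks below describe a reorganisation rather than a behaviour change: the version-resolution helpers (Definitions, Base, and the per-version class, apparently renamed from Build to RemoteBuild) become importable directly from the loaih package, the AppImage-building Build class moves to the new loaih/build.py, and the old loaih.versions module is dropped. A minimal usage sketch of the resulting top-level API, assuming the layout that the CLI hunk below implies and that network access is available:

import loaih

# Resolve a named or numbered query to a single RemoteBuild object.
build = loaih.RemoteBuild('fresh')
print(build)            # query, version and per-arch base URLs (see __str__ below)
print(build.todict())   # the same data as a dictionary

# Named queries that can map to several versions are expanded through Base.
for b in loaih.Base.collectedbuilds('prerelease'):
    print(b.version, b.basedirurl['x86_64'])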

View File

@@ -1,367 +1,195 @@
#!/usr/bin/env python3
#!/usr/bin/env python
# encoding: utf-8
import urllib.request
import loaih.versions as versions
from lxml import etree
import tempfile, os, sys, glob, subprocess, shutil, re, shlex
from packaging.version import parse as parse_version
import datetime
class Build(object):
LANGSTD = [ 'ar', 'de', 'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt', 'pt-BR', 'ru', 'zh-CN', 'zh-TW' ]
LANGBASIC = [ 'en-GB' ]
ARCHSTD = [ u'x86', u'x86_64' ]
class Definitions(object):
DOWNLOADPAGE = "https://www.libreoffice.org/download/download/"
ARCHIVE = "https://downloadarchive.documentfoundation.org/libreoffice/old/"
RELEASE = "https://download.documentfoundation.org/libreoffice/stable/"
DAILY = "https://dev-builds.libreoffice.org/daily/master/Linux-rpm_deb-x86_64@tb87-TDF/"
PRERELEASE = "https://dev-builds.libreoffice.org/pre-releases/deb/x86_64/"
def __init__(self, query, arch):
"""Build all versions that can be found in the indicated repo."""
SELECTORS = {
'still': {
'URL': DOWNLOADPAGE,
'xpath': '(//span[@class="dl_version_number"])[last()]/text()'
},
'fresh': {
'URL': DOWNLOADPAGE,
'xpath': '(//span[@class="dl_version_number"])[1]/text()'
},
'prerelease': {
'URL': DOWNLOADPAGE,
'xpath': '//p[@class="lead_libre"][last()]/following-sibling::ul[last()]/li/a/text()'
},
'daily': {
'URL': DAILY,
'xpath': '//td/a'
}
}
class Base(object):
# Class for static methods which might be useful even outside the build
# scripts.
@staticmethod
def dailyurl(date = datetime.datetime.today()):
"""Returns the URL for the latest valid daily build."""
# As per other parts of the build, we need to maintain a URL also for
# x86 versions, even though one isn't really provided.
# As such, the return value must be a dictionary
# Get the anchor for today's builds
a = etree.HTML(urllib.request.urlopen(Definitions.DAILY).read()).xpath("//td/a[contains(text(), '" + date.strftime('%Y-%m-%d') + "')]/text()")
if len(a) == 0:
# No results found for that date: let's return placeholder values
return { 'x86': '-', 'x86_64': '-' }
# Otherwise, one or more versions were found: let's sort the
# list and get the latest item
return { 'x86': '-', 'x86_64': Definitions.SELECTORS['daily']['URL'] + sorted(a)[-1] }
@staticmethod
def dailyver(date = datetime.datetime.today()):
"""Returns versions present on the latest daily build."""
url = Base.dailyurl(date)['x86_64']
# If no daily release has been provided yet, return empty
if url == '-':
return []
# Rerun the page parsing, this time to find out the versions built
b = etree.HTML(urllib.request.urlopen(url).read()).xpath("//td/a[contains(text(), '_deb.tar.gz')]/text()")
# This should have returned the main package for a version, but can
# have returned multiple ones, so let's treat it as a list
return [ x.split('_')[1] for x in b ]
@staticmethod
def namedver(query):
"""Gets the version for a specific named version."""
if query == 'daily' or query == 'yesterday':
# Daily needs double parsing for the same result to apply.
# We first select today's build anchor:
date = datetime.datetime.today()
if query == 'yesterday':
# Use yesterday's date for testing purposes.
date += datetime.timedelta(days=-1)
return Base.dailyver(date)
# In case the query isn't for daily
return etree.HTML(urllib.request.urlopen(Definitions.SELECTORS[query]['URL']).read()).xpath(Definitions.SELECTORS[query]['xpath'])
@staticmethod
def fullversion(version):
"""Get latest full version from Archive based on partial version."""
versionlist = etree.HTML(urllib.request.urlopen(Definitions.ARCHIVE).read()).xpath(f"//td/a[starts-with(text(), '{version}')]/text()")
cleanlist = sorted([ x.strip('/') for x in versionlist ])
# Sorting, then returning the last version
return cleanlist[-1]
@staticmethod
def urlfromqueryandver(query, version):
"""Returns the fetching URL based on the queried version and the numeric version of it."""
# This has the purpose of simplifying and explaining how the releases are
# laid out.
# If the query tells about daily or 'yesterday' (for testing purposes),
# we might ignore versions and return the value coming from dailyurl:
if query == 'daily':
return Base.dailyurl()
if query == 'yesterday':
date = datetime.datetime.today() + datetime.timedelta(days=-1)
return Base.dailyurl(date)
# All other versions will be taken from Archive, as such we need a full
# version.
# If the version has only 2 dots in it (i.e. it splits into three parts by '.'), that's not a full version and we will call the fullversion() function
fullversion = version
if len(version.split('.')) <= 3:
fullversion = Base.fullversion(version)
# So the final URL is the Archive one, plus the full versions, plus a
# final '/deb/' - and an arch subfolder
baseurl = Definitions.ARCHIVE + fullversion + '/deb/'
retval = {}
# x86 binaries are not anymore offered after 6.3.0.
if parse_version(version) < parse_version('6.3.0'):
retval['x86'] = baseurl + 'x86/'
else:
retval['x86'] = '-'
retval['x86_64'] = baseurl + 'x86_64/'
return retval
@staticmethod
def collectedbuilds(query):
"""Creates a list of Builds based on each namedver found."""
retval = []
a = Base.namedver(query)
if isinstance(a, list) and len(a) > 1:
retval = [ Build(query, version) for version in Base.namedver(query) ]
else:
retval.append(RemoteBuild(query))
return retval
class RemoteBuild(object):
def __init__(self, query, version = None):
"""Should simplify the single builded version."""
self.query = query
self.arch = arch
self.version = ''
self.basedirurl = { 'x86': '-', 'x86_64': '-' }
# Getting versions and so on
v = versions.BuildVersion(self.query)
self.version = v.version
print(f"Debug {self.version}")
self.short_version = str.join('.', self.version.split('.')[0:2])
self.branch_version = None
if not '.' in self.query:
self.branch_version = self.query
self.url = v.basedirurl
# Named version.
# Let's check if a specific version was requested.
if version:
self.version = version
# Other default values
self.language = 'basic'
self.offline_help = False
self.portable = False
self.updatable = True
self.sign = True
self.storage_path = '/mnt/appimage'
self.download_path = '/var/tmp/downloads'
# Specific build version
self.appversion = ''
self.appimagefilename = {}
self.zsyncfilename = {}
# Creating a tempfile
self.builddir = tempfile.mkdtemp()
self.tarballs = {}
self.built = { u'x86': False, u'x86_64': False }
# Preparing the default for the relative path on the storage for
# different versions.
# The path will be evaluated as part of the check() function, as it is
# understood the storage_path can be changed before that phase.
self.relative_path = []
self.full_path = ''
self.baseurl = ''
def calculate(self):
"""Calculate exclusions and other variables."""
# AppName
self.appname = 'LibreOffice' if not self.query == 'daily' and not self.query == 'prerelease' else 'LibreOfficeDev'
# Calculating languagepart
self.languagepart = "."
if ',' in self.language:
self.languagepart += self.language.replace(',', '-')
else:
self.languagepart += self.language
# Calculating help part
self.helppart = '.help' if self.offline_help else ''
# Building the required names
for arch in Build.ARCHSTD:
self.appimagefilename[arch] = self.__gen_appimagefilename__(self.version, arch)
self.zsyncfilename[arch] = self.appimagefilename[arch] + '.zsync'
# Mandate to the private function to calculate the full_path available
# for the storage and the checks.
self.__calculate_full_path__()
def __gen_appimagefilename__(self, version, arch):
"""Generalize the construction of the name of the app."""
self.appversion = version + self.languagepart + self.helppart
return self.appname + f'-{self.appversion}-{arch}.AppImage'
def __calculate_full_path__(self):
"""Calculate relative path of the build, based on internal other variables."""
if len(self.relative_path) == 0:
if self.query == 'daily':
self.relative_path.append('daily')
elif self.query == 'prerelease':
self.relative_path.append('prerelease')
# Not the same check, an additional one
if self.portable:
self.relative_path.append('portable')
fullpath_arr = self.storage_path.split('/')
# Joining relative path only if it is not null
if len(self.relative_path) > 0:
fullpath_arr.extend(self.relative_path)
self.full_path = re.sub(r"/+", '/', str.join('/', fullpath_arr))
def check(self):
"""Checking if the requested AppImage has been already built."""
if not len(self.appimagefilename) == 2:
self.calculate()
for arch in self.arch:
print(f"Searching for {self.appimagefilename[arch]}")
res = subprocess.run(shlex.split(f"find {self.full_path} -name {self.appimagefilename[arch]}"), capture_output=True, env={ "LC_ALL": "C" }, text=True, encoding='utf-8')
if "No such file or directory" in res.stderr:
# Folder is not existent: so the version was not built
# Build stays false, and we go to the next arch
continue
if res.stdout and len(res.stdout.strip("\n")) > 0:
# All good, the command was executed fine.
print(f"Build for {self.version} found.")
self.built[arch] = True
if self.built[arch]:
print(f"The requested AppImage already exists on storage for {arch}. I'll skip downloading, building and moving the results.")
def download(self):
"""Downloads the contents of the URL as it was a folder."""
print(f"Started downloads for {self.version}. Please wait.")
for arch in self.arch:
# Checking if a valid path has been provided
if self.url[arch] == '-':
print(f"No build has been provided for the requested AppImage for {arch}. Continue with other options.")
# Faking it as already built so as to skip other checks.
self.built[arch] = True
continue
if self.built[arch]:
print(f"A build for {arch} was already found. Skipping specific packages.")
continue
# Identifying downloads
contents = etree.HTML(urllib.request.urlopen(self.url[arch]).read()).xpath("//td/a")
self.tarballs[arch] = [ x.text for x in contents if x.text.endswith('tar.gz') and 'deb' in x.text ]
tarballs = self.tarballs[arch]
maintarball = tarballs[0]
# Create and change directory to the download location
os.makedirs(self.download_path, exist_ok = True)
os.chdir(self.download_path)
for archive in tarballs:
# If the archive is already there, do not do anything.
if os.path.exists(archive):
continue
# Download the archive
try:
urllib.request.urlretrieve(self.url[arch] + archive, archive)
except:
print(f"Failed to download {archive}.")
print(f"Finished downloads for {self.version}.")
def build(self):
"""Building all the versions."""
for arch in self.arch:
if self.built[arch]:
# Already built for arch or path not available. User has already been warned.
continue
# Preparation tasks
self.appnamedir = os.path.join(self.builddir, self.appname)
os.makedirs(self.appnamedir, exist_ok=True)
# And then cd to the appname folder.
os.chdir(self.appnamedir)
# Download appimagetool from github
appimagetoolurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-{arch}.AppImage"
urllib.request.urlretrieve(appimagetoolurl, 'appimagetool')
os.chmod('appimagetool', 0o755)
# Build the requested version.
self.__unpackbuild__(arch)
def __unpackbuild__(self, arch):
# We start by filtering out tarballs from the list
buildtarballs = [ self.tarballs[arch][0] ]
# Let's process standard languages and append results to the
# buildtarball
if self.language == 'basic':
if self.offline_help:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'pack_en-GB' in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack_en-GB' in x])
elif self.language == 'standard':
for lang in Build.LANGSTD:
if self.offline_help:
# In case it was not requested, we will carry on the generic
# namedver() query.
# If the results are more than one, we'll take the latest (since we are requested to provide a single build).
a = Base.namedver(self.query)
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack_' + lang) in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack_' + lang) in x ])
elif self.language == 'full':
if self.offline_help:
# We need also all help. Let's replace buildtarball with the
# whole bunch
buildtarballs = self.tarballs[arch]
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack' in x ])
else:
# Looping for each language in self.language
for lang in self.language.split(","):
if self.offline_help:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack' + lang) in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack' + lang) in x ])
os.chdir(self.appnamedir)
if isinstance(a, list) and len(a) == 0:
# No results from the query - let's return default values
return
# Unpacking the tarballs
for archive in buildtarballs:
subprocess.run(shlex.split(f"tar xzf {self.download_path}/{archive}"))
if len(a) == 1:
# version is a single one.
self.version = a[0]
else:
# In this case, we will select the latest release.
self.version = sorted(a)[-1]
else:
# In case of numbered queries, put it as initial version
self.version = self.query
# create appimagedir
self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir')
os.makedirs(self.appimagedir, exist_ok = True)
if len(self.version.split('.')) < 4:
# If not 4 dotted, let's search for the 4 dotted version
self.version = Base.fullversion(self.version)
# At this point, let's decompress the deb packages
subprocess.run(shlex.split("find .. -iname '*.deb' -exec dpkg -x {} . \;"), cwd=self.appimagedir)
if self.portable:
subprocess.run(shlex.split("find . -type f -iname 'bootstraprc' -exec sed -i 's|^UserInstallation=.*|UserInstallation=\$SYSUSERCONFIG/libreoffice/%s|g' {} \+" % self.short_version), cwd=self.appimagedir)
# Changing desktop file
subprocess.run(shlex.split("find . -iname startcenter.desktop -exec cp {} . \;"), cwd=self.appimagedir)
subprocess.run(shlex.split("sed --in-place 's:^Name=.*$:Name=%s:' startcenter.desktop > startcenter.desktop" % self.appname), cwd=self.appimagedir)
subprocess.run(shlex.split("find . -name '*startcenter.png' -path '*hicolor*48x48*' -exec cp {} . \;"), cwd=self.appimagedir)
# Find the name of the binary called in the desktop file.
binaryname = ''
with open(os.path.join(self.appimagedir, 'startcenter.desktop'), 'r') as d:
a = d.readlines()
for line in a:
if re.match(r'^Exec', line):
binaryname = line.split('=')[-1].split(' ')[0]
# Exit at the first match
break
#binary_exec = subprocess.run(shlex.split(r"awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'"), cwd=self.appimagedir, text=True, encoding='utf-8')
#binaryname = binary_exec.stdout.strip("\n")
bindir=os.path.join(self.appimagedir, 'usr', 'bin')
os.makedirs(bindir, exist_ok = True)
subprocess.run(shlex.split("find ../../opt -iname soffice -path '*program*' -exec ln -sf {} ./%s \;" % binaryname), cwd=bindir)
# Download AppRun from github
apprunurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/AppRun-{arch}"
dest = os.path.join(self.appimagedir, 'AppRun')
urllib.request.urlretrieve(apprunurl, dest)
os.chmod(dest, 0o755)
# Dealing with extra options
buildopts = []
if self.sign:
buildopts.append('--sign')
# adding zsync build if updatable
if self.updatable:
buildopts.append(f"-u 'zsync|{self.zsyncfilename[arch]}'")
buildopts_str = str.join(' ', buildopts)
# Build the number-specific build
subprocess.run(shlex.split(f"{self.appnamedir}/appimagetool {buildopts_str} -v ./{self.appname}.AppDir/"), env={ "VERSION": self.appversion })
print(f"Built AppImage version {self.appversion}") self.basedirurl = Base.urlfromqueryandver(self.query, self.version)
# Cleanup phase, before new run. def todict(self):
for deb in glob.glob(self.appnamedir + '/*.deb'): return {
os.remove(deb) 'query': self.query,
subprocess.run(shlex.split("find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+")) 'version': self.version,
'basedirurl': self.basedirurl
}
def __str__(self):
return f"""query: {self.query}
version: {self.version}
x86: {self.basedirurl['x86']}
x86_64: {self.basedirurl['x86_64']}"""
def checksums(self):
"""Create checksums of the built versions."""
# Skip checksum if initially the build was already found in the storage directory
if all(self.built.values()):
return
os.chdir(self.appnamedir)
for arch in self.arch:
for item in [ self.appimagefilename[arch], self.zsyncfilename[arch] ]:
# For any built arch, find out if a file exist.
self.__create_checksum__(item)
def __create_checksum__(self, file):
"""Internal function to create checksum file."""
checksum = subprocess.run(shlex.split(f"md5sum {file}"), capture_output=True, text=True, encoding='utf-8')
if checksum.stdout:
with open(f"{file}.md5", 'w') as c:
c.write(checksum.stdout)
def publish(self):
"""Moves built versions to definitive storage."""
if all(self.built.values()):
# All files are already present in the full_path
return
os.chdir(self.appnamedir)
# Forcing creation of subfolders, in case there is a new build
os.makedirs(self.full_path, exist_ok = True)
for file in glob.glob("*.AppImage*"):
subprocess.run(shlex.split(f"cp -f {file} {self.full_path}"))
def generalize_and_link(self):
"""Creates the needed generalized files if needed."""
# If called with a pointed version, no generalize and link necessary.
if not self.branch_version:
return
appimagefilename = {}
zsyncfilename = {}
# Creating versions for short version and query text
versions = [ self.short_version, self.branch_version ]
for arch in Build.ARCHSTD:
# If already built, do not do anything.
if self.built[arch]:
continue
os.chdir(self.full_path)
# if the appimage for the reported arch is not found, skip to next
# arch
if not os.path.exists(self.appimagefilename[arch]):
continue
# Doing it both for short_name and for branchname
for version in versions:
appimagefilename[arch] = self.appname + '-' + version + self.languagepart + self.helppart + f'-{arch}.AppImage'
zsyncfilename[arch] = appimagefilename[arch] + '.zsync'
# Create the symlink
print(f"Creating {appimagefilename[arch]} and checksums.")
if os.path.exists(appimagefilename[arch]):
os.unlink(appimagefilename[arch])
os.symlink(self.appimagefilename[arch], appimagefilename[arch])
# Create the checksum for the AppImage
self.__create_checksum__(appimagefilename[arch])
# Do not continue if no zsync file is provided.
if not self.updatable:
continue
print(f"Creating zsync file for version {version}.")
if os.path.exists(zsyncfilename[arch]):
os.unlink(zsyncfilename[arch])
shutil.copyfile(self.zsyncfilename[arch], zsyncfilename[arch])
# Editing the zsyncfile
subprocess.run(shlex.split(f"sed --in-place 's/^Filename:.*$/Filename: {appimagefilename[arch]}/' {zsyncfilename[arch]}"))
self.__create_checksum__(zsyncfilename[arch])
def __del__(self):
"""Destructor"""
# Cleaning up build directory
shutil.rmtree(self.builddir)

loaih/build.py Normal file
View File

@@ -0,0 +1,367 @@
#!/usr/bin/env python3
import urllib.request
import loaih
from lxml import etree
import tempfile, os, sys, glob, subprocess, shutil, re, shlex
class Build(object):
LANGSTD = [ 'ar', 'de', 'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt', 'pt-BR', 'ru', 'zh-CN', 'zh-TW' ]
LANGBASIC = [ 'en-GB' ]
ARCHSTD = [ u'x86', u'x86_64' ]
def __init__(self, query, arch):
"""Build all versions that can be found in the indicated repo."""
self.query = query
self.arch = arch
# Getting versions and so on
v = versions.BuildVersion(self.query)
self.version = v.version
print(f"Debug {self.version}")
self.short_version = str.join('.', self.version.split('.')[0:2])
self.branch_version = None
if not '.' in self.query:
self.branch_version = self.query
self.url = v.basedirurl
# Other default values
self.language = 'basic'
self.offline_help = False
self.portable = False
self.updatable = True
self.sign = True
self.storage_path = '/mnt/appimage'
self.download_path = '/var/tmp/downloads'
# Specific build version
self.appversion = ''
self.appimagefilename = {}
self.zsyncfilename = {}
# Creating a tempfile
self.builddir = tempfile.mkdtemp()
self.tarballs = {}
self.built = { u'x86': False, u'x86_64': False }
# Preparing the default for the relative path on the storage for
# different versions.
# The path will be evaluated as part of the check() function, as it is
# understood the storage_path can be changed before that phase.
self.relative_path = []
self.full_path = ''
self.baseurl = ''
def calculate(self):
"""Calculate exclusions and other variables."""
# AppName
self.appname = 'LibreOffice' if not self.query == 'daily' and not self.query == 'prerelease' else 'LibreOfficeDev'
# Calculating languagepart
self.languagepart = "."
if ',' in self.language:
self.languagepart += self.language.replace(',', '-')
else:
self.languagepart += self.language
# Calculating help part
self.helppart = '.help' if self.offline_help else ''
# Building the required names
for arch in Build.ARCHSTD:
self.appimagefilename[arch] = self.__gen_appimagefilename__(self.version, arch)
self.zsyncfilename[arch] = self.appimagefilename[arch] + '.zsync'
# Mandate to the private function to calculate the full_path available
# for the storage and the checks.
self.__calculate_full_path__()
def __gen_appimagefilename__(self, version, arch):
"""Generalize the construction of the name of the app."""
self.appversion = version + self.languagepart + self.helppart
return self.appname + f'-{self.appversion}-{arch}.AppImage'
def __calculate_full_path__(self):
"""Calculate relative path of the build, based on internal other variables."""
if len(self.relative_path) == 0:
if self.query == 'daily':
self.relative_path.append('daily')
elif self.query == 'prerelease':
self.relative_path.append('prerelease')
# Not the same check, an additional one
if self.portable:
self.relative_path.append('portable')
fullpath_arr = self.storage_path.split('/')
# Joining relative path only if it is not null
if len(self.relative_path) > 0:
fullpath_arr.extend(self.relative_path)
self.full_path = re.sub(r"/+", '/', str.join('/', fullpath_arr))
def check(self):
"""Checking if the requested AppImage has been already built."""
if not len(self.appimagefilename) == 2:
self.calculate()
for arch in self.arch:
print(f"Searching for {self.appimagefilename[arch]}")
res = subprocess.run(shlex.split(f"find {self.full_path} -name {self.appimagefilename[arch]}"), capture_output=True, env={ "LC_ALL": "C" }, text=True, encoding='utf-8')
if "No such file or directory" in res.stderr:
# Folder is not existent: so the version was not built
# Build stays false, and we go to the next arch
continue
if res.stdout and len(res.stdout.strip("\n")) > 0:
# All good, the command was executed fine.
print(f"Build for {self.version} found.")
self.built[arch] = True
if self.built[arch]:
print(f"The requested AppImage already exists on storage for {arch}. I'll skip downloading, building and moving the results.")
def download(self):
"""Downloads the contents of the URL as it was a folder."""
print(f"Started downloads for {self.version}. Please wait.")
for arch in self.arch:
# Checking if a valid path has been provided
if self.url[arch] == '-':
print(f"No build has been provided for the requested AppImage for {arch}. Continue with other options.")
# Faking it as already built so as to skip other checks.
self.built[arch] = True
continue
if self.built[arch]:
print(f"A build for {arch} was already found. Skipping specific packages.")
continue
# Identifying downloads
contents = etree.HTML(urllib.request.urlopen(self.url[arch]).read()).xpath("//td/a")
self.tarballs[arch] = [ x.text for x in contents if x.text.endswith('tar.gz') and 'deb' in x.text ]
tarballs = self.tarballs[arch]
maintarball = tarballs[0]
# Create and change directory to the download location
os.makedirs(self.download_path, exist_ok = True)
os.chdir(self.download_path)
for archive in tarballs:
# If the archive is already there, do not do anything.
if os.path.exists(archive):
continue
# Download the archive
try:
urllib.request.urlretrieve(self.url[arch] + archive, archive)
except:
print(f"Failed to download {archive}.")
print(f"Finished downloads for {self.version}.")
def build(self):
"""Building all the versions."""
for arch in self.arch:
if self.built[arch]:
# Already built for arch or path not available. User has already been warned.
continue
# Preparation tasks
self.appnamedir = os.path.join(self.builddir, self.appname)
os.makedirs(self.appnamedir, exist_ok=True)
# And then cd to the appname folder.
os.chdir(self.appnamedir)
# Download appimagetool from github
appimagetoolurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-{arch}.AppImage"
urllib.request.urlretrieve(appimagetoolurl, 'appimagetool')
os.chmod('appimagetool', 0o755)
# Build the requested version.
self.__unpackbuild__(arch)
def __unpackbuild__(self, arch):
# We start by filtering out tarballs from the list
buildtarballs = [ self.tarballs[arch][0] ]
# Let's process standard languages and append results to the
# buildtarball
if self.language == 'basic':
if self.offline_help:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'pack_en-GB' in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack_en-GB' in x])
elif self.language == 'standard':
for lang in Build.LANGSTD:
if self.offline_help:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack_' + lang) in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack_' + lang) in x ])
elif self.language == 'full':
if self.offline_help:
# We need also all help. Let's replace buildtarball with the
# whole bunch
buildtarballs = self.tarballs[arch]
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack' in x ])
else:
# Looping for each language in self.language
for lang in self.language.split(","):
if self.offline_help:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack' + lang) in x ])
else:
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack' + lang) in x ])
os.chdir(self.appnamedir)
# Unpacking the tarballs
for archive in buildtarballs:
subprocess.run(shlex.split(f"tar xzf {self.download_path}/{archive}"))
# create appimagedir
self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir')
os.makedirs(self.appimagedir, exist_ok = True)
# At this point, let's decompress the deb packages
subprocess.run(shlex.split("find .. -iname '*.deb' -exec dpkg -x {} . \;"), cwd=self.appimagedir)
if self.portable:
subprocess.run(shlex.split("find . -type f -iname 'bootstraprc' -exec sed -i 's|^UserInstallation=.*|UserInstallation=\$SYSUSERCONFIG/libreoffice/%s|g' {} \+" % self.short_version), cwd=self.appimagedir)
# Changing desktop file
subprocess.run(shlex.split("find . -iname startcenter.desktop -exec cp {} . \;"), cwd=self.appimagedir)
subprocess.run(shlex.split("sed --in-place 's:^Name=.*$:Name=%s:' startcenter.desktop > startcenter.desktop" % self.appname), cwd=self.appimagedir)
subprocess.run(shlex.split("find . -name '*startcenter.png' -path '*hicolor*48x48*' -exec cp {} . \;"), cwd=self.appimagedir)
# Find the name of the binary called in the desktop file.
binaryname = ''
with open(os.path.join(self.appimagedir, 'startcenter.desktop'), 'r') as d:
a = d.readlines()
for line in a:
if re.match(r'^Exec', line):
binaryname = line.split('=')[-1].split(' ')[0]
# Exit at the first match
break
#binary_exec = subprocess.run(shlex.split(r"awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'"), cwd=self.appimagedir, text=True, encoding='utf-8')
#binaryname = binary_exec.stdout.strip("\n")
bindir=os.path.join(self.appimagedir, 'usr', 'bin')
os.makedirs(bindir, exist_ok = True)
subprocess.run(shlex.split("find ../../opt -iname soffice -path '*program*' -exec ln -sf {} ./%s \;" % binaryname), cwd=bindir)
# Download AppRun from github
apprunurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/AppRun-{arch}"
dest = os.path.join(self.appimagedir, 'AppRun')
urllib.request.urlretrieve(apprunurl, dest)
os.chmod(dest, 0o755)
# Dealing with extra options
buildopts = []
if self.sign:
buildopts.append('--sign')
# adding zsync build if updatable
if self.updatable:
buildopts.append(f"-u 'zsync|{self.zsyncfilename[arch]}'")
buildopts_str = str.join(' ', buildopts)
# Build the number-specific build
subprocess.run(shlex.split(f"{self.appnamedir}/appimagetool {buildopts_str} -v ./{self.appname}.AppDir/"), env={ "VERSION": self.appversion })
print(f"Built AppImage version {self.appversion}")
# Cleanup phase, before new run.
for deb in glob.glob(self.appnamedir + '/*.deb'):
os.remove(deb)
subprocess.run(shlex.split("find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+"))
def checksums(self):
"""Create checksums of the built versions."""
# Skip checksum if initially the build was already found in the storage directory
if all(self.built.values()):
return
os.chdir(self.appnamedir)
for arch in self.arch:
for item in [ self.appimagefilename[arch], self.zsyncfilename[arch] ]:
# For any built arch, find out if a file exist.
self.__create_checksum__(item)
def __create_checksum__(self, file):
"""Internal function to create checksum file."""
checksum = subprocess.run(shlex.split(f"md5sum {file}"), capture_output=True, text=True, encoding='utf-8')
if checksum.stdout:
with open(f"{file}.md5", 'w') as c:
c.write(checksum.stdout)
def publish(self):
"""Moves built versions to definitive storage."""
if all(self.built.values()):
# All files are already present in the full_path
return
os.chdir(self.appnamedir)
# Forcing creation of subfolders, in case there is a new build
os.makedirs(self.full_path, exist_ok = True)
for file in glob.glob("*.AppImage*"):
subprocess.run(shlex.split(f"cp -f {file} {self.full_path}"))
def generalize_and_link(self):
"""Creates the needed generalized files if needed."""
# If called with a pointed version, no generalize and link necessary.
if not self.branch_version:
return
appimagefilename = {}
zsyncfilename = {}
# Creating versions for short version and query text
versions = [ self.short_version, self.branch_version ]
for arch in Build.ARCHSTD:
# If already built, do not do anything.
if self.built[arch]:
continue
os.chdir(self.full_path)
# if the appimage for the reported arch is not found, skip to next
# arch
if not os.path.exists(self.appimagefilename[arch]):
continue
# Doing it both for short_name and for branchname
for version in versions:
appimagefilename[arch] = self.appname + '-' + version + self.languagepart + self.helppart + f'-{arch}.AppImage'
zsyncfilename[arch] = appimagefilename[arch] + '.zsync'
# Create the symlink
print(f"Creating {appimagefilename[arch]} and checksums.")
if os.path.exists(appimagefilename[arch]):
os.unlink(appimagefilename[arch])
os.symlink(self.appimagefilename[arch], appimagefilename[arch])
# Create the checksum for the AppImage
self.__create_checksum__(appimagefilename[arch])
# Do not continue if no zsync file is provided.
if not self.updatable:
continue
print(f"Creating zsync file for version {version}.")
if os.path.exists(zsyncfilename[arch]):
os.unlink(zsyncfilename[arch])
shutil.copyfile(self.zsyncfilename[arch], zsyncfilename[arch])
# Editing the zsyncfile
subprocess.run(shlex.split(f"sed --in-place 's/^Filename:.*$/Filename: {appimagefilename[arch]}/' {zsyncfilename[arch]}"))
self.__create_checksum__(zsyncfilename[arch])
def __del__(self):
"""Destructor"""
# Cleaning up build directory
shutil.rmtree(self.builddir)
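For orientation, the relocated Build class above would be driven roughly as follows. This is a hypothetical sketch, not part of the commit: it assumes the module is importable as loaih.build, that the versions.BuildVersion reference in __init__ resolves, and that network access, dpkg and a writable storage path are available.

from loaih.build import Build

b = Build('fresh', ['x86_64'])          # query and list of target architectures
b.language = 'standard'                 # 'basic', 'standard', 'full' or a comma-separated list
b.offline_help = False
b.storage_path = '/srv/appimage'        # hypothetical local paths
b.download_path = '/var/tmp/downloads'

b.calculate()              # work out AppImage/zsync file names and the storage path
b.check()                  # mark architectures already present on storage
b.download()               # fetch the .deb tarballs for what is still missing
b.build()                  # unpack the packages and run appimagetool
b.checksums()              # write .md5 files next to the results
b.publish()                # copy AppImage and zsync files to storage
b.generalize_and_link()    # create short-version/branch symlinks and their checksums
del b                      # the destructor removes the temporary build directory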

View File

@@ -24,10 +24,10 @@ def getversion(query, jsonout):
for q in queries:
if '.' in q:
# Numbered version. It is safe to send it to Build.
b.append(loaih.versions.Build(q))
b.append(loaih.RemoteBuild(q))
else:
# Named version. For safety, we call a helper method for a collection
b.extend(loaih.versions.Base.collectedbuilds(q))
b.extend(loaih.Base.collectedbuilds(q))
if len(b) > 0:
if jsonout:

View File

@@ -1,195 +0,0 @@
#!/usr/bin/env python
# encoding: utf-8
import urllib.request
from lxml import etree
from packaging.version import parse as parse_version
import datetime
class Definitions(object):
DOWNLOADPAGE = "https://www.libreoffice.org/download/download/"
ARCHIVE = "https://downloadarchive.documentfoundation.org/libreoffice/old/"
RELEASE = "https://download.documentfoundation.org/libreoffice/stable/"
DAILY = "https://dev-builds.libreoffice.org/daily/master/Linux-rpm_deb-x86_64@tb87-TDF/"
PRERELEASE = "https://dev-builds.libreoffice.org/pre-releases/deb/x86_64/"
SELECTORS = {
'still': {
'URL': DOWNLOADPAGE,
'xpath': '(//span[@class="dl_version_number"])[last()]/text()'
},
'fresh': {
'URL': DOWNLOADPAGE,
'xpath': '(//span[@class="dl_version_number"])[1]/text()'
},
'prerelease': {
'URL': DOWNLOADPAGE,
'xpath': '//p[@class="lead_libre"][last()]/following-sibling::ul[last()]/li/a/text()'
},
'daily': {
'URL': DAILY,
'xpath': '//td/a'
}
}
class Base(object):
# Class for static methods which might be useful even outside the build
# scripts.
@staticmethod
def dailyurl(date = datetime.datetime.today()):
"""Returns the URL for the latest valid daily build."""
# As per other parts of the build, we need to maintain a URL also for
# x86 versions, even though one isn't really provided.
# As such, the return value must be a dictionary
# Get the anchor for today's builds
a = etree.HTML(urllib.request.urlopen(Definitions.DAILY).read()).xpath("//td/a[contains(text(), '" + date.strftime('%Y-%m-%d') + "')]/text()")
if len(a) == 0:
# No results found for that date: let's return placeholder values
return { 'x86': '-', 'x86_64': '-' }
# Otherwise, one or more versions were found: let's sort the
# list and get the latest item
return { 'x86': '-', 'x86_64': Definitions.SELECTORS['daily']['URL'] + sorted(a)[-1] }
@staticmethod
def dailyver(date = datetime.datetime.today()):
"""Returns versions present on the latest daily build."""
url = Base.dailyurl(date)['x86_64']
# If no daily release has been provided yet, return empty
if url == '-':
return []
# Rerun the page parsing, this time to find out the versions built
b = etree.HTML(urllib.request.urlopen(url).read()).xpath("//td/a[contains(text(), '_deb.tar.gz')]/text()")
# This should have returned the main package for a version, but can
# have returned multiple ones, so let's treat it as a list
return [ x.split('_')[1] for x in b ]
@staticmethod
def namedver(query):
"""Gets the version for a specific named version."""
if query == 'daily' or query == 'yesterday':
# Daily needs double parsing for the same result to apply.
# We first select today's build anchor:
date = datetime.datetime.today()
if query == 'yesterday':
# Use yesterday's date for testing purposes.
date += datetime.timedelta(days=-1)
return Base.dailyver(date)
# In case the query isn't for daily
return etree.HTML(urllib.request.urlopen(Definitions.SELECTORS[query]['URL']).read()).xpath(Definitions.SELECTORS[query]['xpath'])
@staticmethod
def fullversion(version):
"""Get latest full version from Archive based on partial version."""
versionlist = etree.HTML(urllib.request.urlopen(Definitions.ARCHIVE).read()).xpath(f"//td/a[starts-with(text(), '{version}')]/text()")
cleanlist = sorted([ x.strip('/') for x in versionlist ])
# Sorting, then returning the last version
return cleanlist[-1]
@staticmethod
def urlfromqueryandver(query, version):
"""Returns the fetching URL based on the queried version and the numeric version of it."""
# This has the purpose of simplifying and explaining how the releases are
# laid out.
# If the query tells about daily or 'yesterday' (for testing purposes),
# we might ignore versions and return the value coming from dailyurl:
if query == 'daily':
return Base.dailyurl()
if query == 'yesterday':
date = datetime.datetime.today() + datetime.timedelta(days=-1)
return Base.dailyurl(date)
# All other versions will be taken from Archive, as such we need a full
# version.
# If the version has only 2 dots in it (i.e. it splits into three parts by '.'), that's not a full version and we will call the fullversion() function
fullversion = version
if len(version.split('.')) <= 3:
fullversion = Base.fullversion(version)
# So the final URL is the Archive one, plus the full versions, plus a
# final '/deb/' - and an arch subfolder
baseurl = Definitions.ARCHIVE + fullversion + '/deb/'
retval = {}
# x86 binaries are not anymore offered after 6.3.0.
if parse_version(version) < parse_version('6.3.0'):
retval['x86'] = baseurl + 'x86/'
else:
retval['x86'] = '-'
retval['x86_64'] = baseurl + 'x86_64/'
return retval
@staticmethod
def collectedbuilds(query):
"""Creates a list of Builds based on each namedver found."""
retval = []
a = Base.namedver(query)
if isinstance(a, list) and len(a) > 1:
retval = [ Build(query, version) for version in Base.namedver(query) ]
else:
retval.append(Build(query))
return retval
class Build(object):
def __init__(self, query, version = None):
"""Should simplify the single builded version."""
self.query = query
self.version = ''
self.basedirurl = { 'x86': '-', 'x86_64': '-' }
if not '.' in self.query:
# Named version.
# Let's check if a specific version was requested.
if version:
self.version = version
else:
# In case it was not requested, we will carry on the generic
# namedver() query.
# If the results are more than one, we'll take the latest (since we are requested to provide a single build).
a = Base.namedver(self.query)
if isinstance(a, list) and len(a) == 0:
# No results from the query - let's return default values
return
if len(a) == 1:
# version is a single one.
self.version = a[0]
else:
# In this case, we will select the latest release.
self.version = sorted(a)[-1]
else:
# In case of numbered queries, put it as initial version
self.version = self.query
if len(self.version.split('.')) < 4:
# If not 4 dotted, let's search for the 4 dotted version
self.version = Base.fullversion(self.version)
self.basedirurl = Base.urlfromqueryandver(self.query, self.version)
def todict(self):
return {
'query': self.query,
'version': self.version,
'basedirurl': self.basedirurl
}
def __str__(self):
return f"""query: {self.query}
version: {self.version}
x86: {self.basedirurl['x86']}
x86_64: {self.basedirurl['x86_64']}"""