Revised and tested the getversion flow. Revised, but not yet tested, the build flows.
parent d9775f4f94
commit 024535afa9
(hunks from the main loaih module)

@@ -4,38 +4,46 @@
(old side removed: import loaih.solvers and the Definitions class wrapper that previously held these constants and selectors at class scope)

import datetime
import json
import requests
from lxml import html
from packaging.version import parse as parse_version


# Constants
DOWNLOADPAGE = "https://www.libreoffice.org/download/download/"
ARCHIVE = "https://downloadarchive.documentfoundation.org/libreoffice/old/"
RELEASE = "https://download.documentfoundation.org/libreoffice/stable/"
DAILY = "https://dev-builds.libreoffice.org/daily/master/"
PRERELEASE = "https://dev-builds.libreoffice.org/pre-releases/deb/x86_64/"

SELECTORS = {
    'still': {
        'URL': DOWNLOADPAGE,
        'xpath': '(//span[@class="dl_version_number"])[last()]/text()'
    },
    'fresh': {
        'URL': DOWNLOADPAGE,
        'xpath': '(//span[@class="dl_version_number"])[1]/text()'
    },
    'prerelease': {
        'URL': DOWNLOADPAGE,
        'xpath': '//p[@class="lead_libre"][last()]/following-sibling::ul[last()]/li/a/text()'
    },
    'daily': {
        'URL': DAILY,
        'xpath': '//td/a'
    }
}


# Generic functions
def match_xpath(url: str, xpath: str):
    """Uses a couple of extensions to get results over webpage."""
    resource = requests.get(url, timeout=10)
    parsed = html.fromstring(resource.content)
    return parsed.xpath(xpath)


# Classes
class Version():
    """Represent the skeleton of each queried version."""
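The SELECTORS table and match_xpath() are what the named solvers further down use to scrape a version number from the download pages. A minimal sketch of that lookup, assuming the constants and function above are exposed from the loaih package namespace and that the download page is reachable; the channel name picked here is only illustrative:

# Sketch: resolve the advertised version number for a named channel.
# Assumes SELECTORS and match_xpath are importable from the loaih package.
import loaih

channel = 'fresh'                      # illustrative; could be 'still' or 'prerelease'
selector = loaih.SELECTORS[channel]
hits = loaih.match_xpath(selector['URL'], selector['xpath'])
print(hits[0] if hits else "no version advertised")   # prints the scraped version string, if any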
@@ -65,284 +73,151 @@ version: {self.version}
(old side removed: the Query class; the Base class of static helpers dailyurl, dailyver, namedver, fullversion and urlfromqueryandver; a large commented-out collectedbuilds draft; and the RemoteBuild class)

x86: {self.urls['x86']}
x86_64: {self.urls['x86_64']}"""


class QueryError(Exception):
    """Standard exception for errors regarding queries."""


class Solver():
    """Generic solver to call others."""

    def __init__(self, text: str, default_to_current = False):
        self.text = text
        self.version = None
        self.default_to_current = default_to_current
        self.baseurl = ARCHIVE

    def solve(self):
        """Splits the query text possibilities, calling all the rest of the solvers."""

        solver = self
        if self.text in { 'current', 'yesterday', 'daily' }:
            solver = DailySolver(self.text, self.default_to_current)
        elif self.text in { 'still', 'fresh', 'prerelease' }:
            solver = NamedSolver(self.text)
        elif '.' in self.text:
            solver = NumberedSolver(self.text)
        else:
            try:
                int(self.text)
                solver = DailySolver(self.text, self.default_to_current)
            except ValueError:
                raise QueryError("The queried version does not exist.")

        self.version = solver.solve()
        self.baseurl = solver.baseurl
        return self.version

    def to_version(self):
        retval = Version()
        retval.query = self.text
        retval.version = self.version
        retval.urls['x86_64'] = self.baseurl
        return retval

    @staticmethod
    def parse(text: str, default_to_current = False):
        """Calling the same as solver class."""
        retval = Solver(text, default_to_current)
        retval.solve()
        return retval.to_version()


class DailySolver(Solver):
    """Specific solver to daily queries."""

    def __init__(self, text: str, default_to_current = False):
        super().__init__(text, default_to_current)
        self.baseurl = DAILY

    def solve(self):
        """Get daily urls based on query."""
        x = "//td/a[starts-with(text(),'Linux-rpm_deb-x86') and contains(text(),'TDF/')]/text()"
        tinderbox_segment = match_xpath(self.baseurl, x)[-1]
        self.baseurl = self.baseurl + tinderbox_segment

        # Reiterate now to search for the dated version
        xpath_query = "//td/a/text()"
        daily_set = match_xpath(self.baseurl, xpath_query)

        matching = ''
        today = datetime.datetime.today()
        try:
            int(self.text)
            matching = datetime.datetime.strptime(self.text, "%Y%m%d").strftime('%Y-%m-%d')
        except ValueError:
            # All textual version
            if self.text == 'current':
                matching = 'current'
            elif self.text == 'daily':
                matching = today.strftime('%Y-%m-%d')
            elif self.text == 'yesterday':
                matching = (today + datetime.timedelta(days=-1)).strftime("%Y-%m-%d")

        results = sorted([ x for x in daily_set if matching in x ])
        if len(results) == 0:
            # No daily versions found.
            if self.default_to_current:
                solver = DailySolver('current')
                self.version = solver.version
                self.baseurl = solver.baseurl
        else:
            self.baseurl = self.baseurl + results[-1]

        xpath_string = "//td/a[contains(text(), '_deb.tar.gz')]/text()"
        links = match_xpath(self.baseurl, xpath_string)
        if len(links) > 0:
            link = str(links[-1])
            self.version = link.rsplit('/', maxsplit=1)[-1].split('_')[1]

        return self.version


class NamedSolver(Solver):
    """Solves the query knowing that the input is a named query."""

    def __init__(self, text: str):
        super().__init__(text)
        self.baseurl = SELECTORS[self.text]['URL']
        self.generalver = ''

    def solve(self):
        """Get versions from query."""
        xpath_query = SELECTORS[self.text]['xpath']
        results = sorted(match_xpath(self.baseurl, xpath_query))

        if len(results) > 0:
            self.generalver = str(results[-1])

        result: str = self.generalver
        xpath_string = f"//td/a[starts-with(text(),'{result}')]/text()"
        archived_versions = sorted(match_xpath(ARCHIVE, xpath_string))

        if len(archived_versions) == 0:
            return self.version

        # Return just the last versions
        fullversion: str = str(archived_versions[-1])
        self.baseurl = ARCHIVE + fullversion + 'deb/x86_64/'
        self.version = fullversion.rstrip('/')

        return self.version


class NumberedSolver(Solver):
    """Specific solver for numbered versions."""

    def __init__(self, text: str):
        super().__init__(text)

    def solve(self):
        xpath_string = f"//td/a[starts-with(text(),'{self.text}')]/text()"
        versions = sorted(match_xpath(self.baseurl, xpath_string))
        if len(versions) == 0:
            # It is possible that in the ARCHIVE there's no such version (might be a prerelease)
            return self.version

        version = str(versions[-1])
        self.baseurl = self.baseurl + version + 'deb/x86_64/'
        self.version = version.rstrip('/')

        return self.version
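Taken together, Solver.parse() is the single entry point the revised getversion flow relies on: it dispatches to DailySolver, NamedSolver or NumberedSolver and hands back a Version carrying the resolved number and download folders. A minimal usage sketch, assuming the classes above are exposed from the loaih package namespace and the remote pages are reachable; the sample query strings are purely illustrative:

# Sketch of the revised getversion flow at the Python level.
# Assumes Solver, QueryError and Version live in the loaih package namespace.
import loaih

for query in ('fresh', '7.3.5', 'daily'):      # sample queries, purely illustrative
    try:
        ver = loaih.Solver.parse(query, default_to_current=True)
    except loaih.QueryError as err:
        print(f"{query}: {err}")
        continue
    # Version carries the resolved number and the per-arch download folders.
    print(f"{query} -> {ver.version} ({ver.urls['x86_64']})")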
loaih/build.py: 152 additions, 152 deletions
@@ -9,6 +9,7 @@ import shutil
import re
import shlex
import tempfile
import urllib.error
import urllib.request
import hashlib
from lxml import etree

@@ -20,11 +21,14 @@ class Collection(list):
(old side removed: Collection previously extended itself with Build(query, arch, version) for each version from loaih.Base.collectedbuilds(query), and Build derived from loaih.RemoteBuild)

    def __init__(self, query, arch = ['x86', 'x86_64']):
        """Build a list of version to check/build for this round."""
        super().__init__()
        version = loaih.Solver.parse(query)

        # If a version is not buildable, discard it now!
        arch = [ x for x in arch if version.urls[x] != '-' ]
        self.extend([ Build(version, ar) for ar in arch ])


class Build():
    """Builds a single version."""

    LANGSTD = [ 'ar', 'de', 'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt',

@@ -32,16 +36,22 @@ class Build(loaih.RemoteBuild):
    LANGBASIC = [ 'en-GB' ]
    ARCHSTD = [ 'x86', 'x86_64' ]

    def __init__(self, version: loaih.Version, arch):
        self.version = version
        self.tidy_folder = True
        self.verbose = True
        self.arch = arch
        self.short_version = str.join('.', self.version.version.split('.')[0:2])
        self.branch_version = None
        if not '.' in self.version.query:
            self.branch_version = self.version.query
        numeric = re.match(r'^[0-9]{8}$', self.version.query)
        if numeric or self.version.query in { 'yesterday', 'current' }:
            self.branch_version = 'daily'
        self.url = self.version.urls[arch]

        # Other default values - for structured builds
        # Most likely will be overridden by cli
        self.language = 'basic'
        self.offline_help = False
        self.portable = False

@@ -81,19 +91,22 @@ class Build(loaih.RemoteBuild):
    def calculate(self):
        """Calculate exclusions and other variables."""

        if self.verbose:
            print("--- Calculate Phase ---")

        # let's check here if we are on a remote repo or local.
        if self.storage_path.startswith("http"):
            # Final repository is remote
            self.repo_type = 'remote'
            if self.verbose:
                print("Repo is remote.")
        else:
            self.repo_type = 'local'
            if self.verbose:
                print("Repo is local.")

        # AppName
        if self.branch_version in { 'prerelease', 'daily' }:
            self.appname = 'LibreOfficeDev'

        # Calculating languagepart

@@ -126,14 +139,15 @@ class Build(loaih.RemoteBuild):
    def __calculate_full_path__(self):
        """Calculate relative path of the build, based on internal other variables."""
        if len(self.relative_path) == 0:
            if self.tidy_folder:
                if self.branch_version == 'daily':
                    self.relative_path.append('daily')
                elif self.query == 'prerelease':
                    self.relative_path.append('prerelease')

            # Not the same check, an additional one
            if self.portable:
                self.relative_path.append('portable')

        # Fullpath might be intended two ways:
        if self.repo_type == 'remote':

@@ -156,13 +170,15 @@ class Build(loaih.RemoteBuild):
    def check(self):
        """Checking if the requested AppImage has been already built."""

        if self.verbose:
            print("--- Check Phase ---")

        if len(self.appimagefilename) != 2:
            self.calculate()

        for arch in self.arch:
            if self.verbose:
                print(f"Searching for {self.appimagefilename[arch]}")
            # First, check if by metadata the repo is remote or not.
            if self.repo_type == 'remote':
                # Remote storage. I have to query a remote site to know if it

@@ -202,36 +218,33 @@ class Build(loaih.RemoteBuild):
                self.built[arch] = True

            if self.built[arch]:
                if self.verbose:
                    print(f"Found requested AppImage: {self.appimagefilename[arch]}.")

    def download(self):
        """Downloads the contents of the URL as it was a folder."""

        if self.verbose:
            print("--- Download Phase ---")
        print(f"Started downloads for {self.version}. Please wait.")
        for arch in self.arch:
            # Checking if a valid path has been provided
            if self.url[arch] == '-':
                if self.verbose:
                    print(f"Cannot build for arch {arch}. Continuing with other arches.")
                # Faking already built it so to skip other checks.
                self.built[arch] = True
                continue

            if self.built[arch]:
                if self.verbose:
                    print(f"A build for {arch} was already found. Skipping specific packages.")
                continue

            # Identifying downloads
            contents = loaih.match_xpath(self.url[arch], "//td/a/text()")
            self.tarballs[arch] = [ x for x in contents if x.endswith('tar.gz') and 'deb' in x ]
            tarballs = self.tarballs[arch]

            # Create and change directory to the download location

@@ -241,7 +254,7 @@ class Build(loaih.RemoteBuild):
            # If the archive is already there, do not do anything.
            # If it is a daily build or a pre-release, due to filename
            # clashes, redownload the whole build.
            if os.path.exists(archive) and self.version.query not in { 'daily', 'prerelease' }:
                continue

            # Download the archive

@@ -250,12 +263,14 @@ class Build(loaih.RemoteBuild):
            except Exception as error:
                print(f"Failed to download {archive}: {error}.")

        if self.verbose:
            print(f"Finished downloads for {self.version}.")

    def build(self):
        """Building all the versions."""

        if self.verbose:
            print("--- Building Phase ---")

        for arch in self.arch:
            if self.built[arch]:

@@ -388,12 +403,20 @@ class Build(loaih.RemoteBuild):
        buildopts_str = str.join(' ', buildopts)
        # Build the number-specific build
        if self.verbose:
            subprocess.run(shlex.split(
                f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
                f"./{self.appname}.AppDir/"
            ), env={ "VERSION": self.appversion }, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, check=True)
        else:
            subprocess.run(shlex.split(
                f"{self.appnamedir}/appimagetool {buildopts_str} -v " +
                f"./{self.appname}.AppDir/"
            ), env={ "VERSION": self.appversion }, check=True)

        if self.verbose:
            print(f"Built AppImage version {self.appversion}")

        # Cleanup phase, before new run.
        for deb in glob.glob(self.appnamedir + '/*.deb'):

@@ -406,8 +429,9 @@ class Build(loaih.RemoteBuild):
    def checksums(self):
        """Create checksums of the built versions."""
        # Skip checksum if initally the build was already found in the storage directory

        if self.verbose:
            print("--- Checksum Phase ---")

        if all(self.built[arch] for arch in self.arch):
            return

@@ -448,7 +472,8 @@ class Build(loaih.RemoteBuild):
    def publish(self):
        """Moves built versions to definitive storage."""

        if self.verbose:
            print("--- Publish Phase ---")

        if all(self.built[arch] for arch in self.arch):
            # All files are already present in the full_path

@@ -461,11 +486,19 @@ class Build(loaih.RemoteBuild):
        # Build destination directory
        remotepath = self.remote_path.rstrip('/') + self.full_path
        try:
            if self.verbose:
                subprocess.run(
                    r"rsync -rlIvz --munge-links *.AppImage* " +
                    f"{self.remote_host}:{remotepath}",
                    cwd=self.appnamedir, shell=True, check=True
                )
            else:
                subprocess.run(
                    r"rsync -rlIvz --munge-links *.AppImage* " +
                    f"{self.remote_host}:{remotepath}",
                    cwd=self.appnamedir, shell=True, check=True,
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
                )
        finally:
            pass

@@ -482,14 +515,15 @@ class Build(loaih.RemoteBuild):
    def generalize_and_link(self, chdir = 'default'):
        """Creates the needed generalized files if needed."""

        if self.verbose:
            print("--- Generalize and Link Phase ---")

        # If called with a pointed version, no generalize and link necessary.
        if not self.branch_version:
            return

        # If a prerelease or a daily version, either.
        if self.version.query in { 'daily', 'prerelease' }:
            return

        if chdir == 'default':

@@ -519,7 +553,8 @@ class Build(loaih.RemoteBuild):
            zsyncfilename[arch] = appimagefilename[arch] + '.zsync'

            # Create the symlink
            if self.verbose:
                print(f"Creating {appimagefilename[arch]} and checksums.")
            if os.path.exists(appimagefilename[arch]):
                os.unlink(appimagefilename[arch])
            os.symlink(self.appimagefilename[arch], appimagefilename[arch])

@@ -529,7 +564,8 @@ class Build(loaih.RemoteBuild):
            if not self.updatable:
                continue

            if self.verbose:
                print(f"Creating zsync file for version {version}.")
            if os.path.exists(zsyncfilename[arch]):
                os.unlink(zsyncfilename[arch])
            shutil.copyfile(self.zsyncfilename[arch], zsyncfilename[arch])
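Since Collection now resolves the query through loaih.Solver.parse() and spawns one Build per usable architecture, a batch run can also be driven directly from Python. The sketch below follows the attributes and phases shown in the hunks above; the option values and the storage path are arbitrary examples, and the build flow itself is, per the commit message, still untested:

# Illustrative driver for the revised (untested) build flow.
# Option values below are arbitrary examples, not project defaults.
import loaih.build

collection = loaih.build.Collection('still', ['x86_64'])
for appbuild in collection:
    appbuild.verbose = True
    appbuild.language = 'it'
    appbuild.offline_help = False
    appbuild.portable = False
    appbuild.updatable = True
    appbuild.storage_path = '/srv/appimage'      # hypothetical local repo path
    appbuild.download_path = '/var/tmp/downloads'
    appbuild.calculate()
    appbuild.check()
    appbuild.download()
    appbuild.build()
    appbuild.checksums()
    appbuild.publish()
    appbuild.generalize_and_link()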
(hunks from the click-based command-line module)

@@ -2,7 +2,8 @@
# encoding: utf-8
"""Helps with command line commands."""

import os
import sys
import json
import click
import yaml

@@ -15,8 +16,9 @@ def cli():
@cli.command()
@click.option('-j', '--json', 'jsonout', default=False, is_flag=True, help="Output format in json.")
@click.option('--default-to-current', '-d', is_flag=True, default=False, help="If no versions are found, default to current one (for daily builds). Default: do not default to current.")
@click.argument('query')
def getversion(query, jsonout, default_to_current):
    """Get download information for named or numbered versions."""

    batchlist = []

@@ -27,12 +29,13 @@ def getversion(query, jsonout):
        queries.append(query)

    for singlequery in queries:
        elem = loaih.Solver.parse(singlequery, default_to_current)
        if elem.version not in { None, "" }:
            batchlist.append(elem)

    if len(batchlist) > 0:
        if jsonout:
            click.echo(json.dumps([x.to_dict() for x in batchlist ]))
        else:
            for value in batchlist:
                click.echo(value)

@@ -64,6 +67,7 @@ def batch(yamlfile, verbose):
    for obj in collection:
        # Configuration phase
        obj.verbose = verbose
        obj.language = cbuild['language']
        obj.offline_help = cbuild['offline_help']
        obj.portable = cbuild['portable']

@@ -101,30 +105,29 @@
@cli.command()
@click.option('-a', '--arch', 'arch', default='x86_64',
    type=click.Choice(['x86', 'x86_64', 'all'], case_sensitive=False), help="Build the AppImage for a specific architecture. If there is no specific options, the process will build for both architectures (if available). Default: x86_64")
@click.option('--check', '-c', is_flag=True, default=False,
    help="Checks in the repository path if the queried version is existent. Default: do not check")
@click.option('-d', '--download-path', 'download_path', default = '/var/tmp/downloads', type=str, help="Path to the download folder. Default: /var/tmp/downloads")
@click.option('-l', '--language', 'language', default = 'basic', type=str, help="Languages to be included. Options: basic, standard, full, a language string (e.g. 'it') or a list of languages comma separated (e.g.: 'en-US,en-GB,it'). Default: basic")
@click.option('-o/-O', '--offline-help/--no-offline-help', 'offline', default = False, help="Include or not the offline help for the chosen languages. Default: no offline help")
@click.option('-p/-P', '--portable/--no-portable', 'portable', default = False,
    help="Create a portable version of the AppImage or not. Default: no portable")
@click.option('-r', '--repo-path', 'repo_path', default = '.', type=str, help="Path to the final storage of the AppImage. Default: current directory")
@click.option('-s/-S', '--sign/--no-sign', 'sign', default=True, help="Wether to sign the build. Default: sign")
@click.option('-u/-U', '--updatable/--no-updatable', 'updatable', default = True, help="Create an updatable version of the AppImage or not. Default: updatable")
@click.argument('query')
def build(arch, language, offline, portable, updatable, download_path, repo_path, check, sign, query):
    """Builds an Appimage with the provided options."""

    # Multiple query support
    queries = []
    if ',' in query:
        queries.extend(query.split(','))
    else:
        queries.append(query)

    # Parsing options
    arches = []
    if arch.lower() == 'all':

@@ -133,31 +136,32 @@ def build(arch, language, offline, portable, updatable, download_path, repo_path
    else:
        arches = [ arch.lower() ]

    for q in queries:
        collection = loaih.build.Collection(q, arches)
        for appbuild in collection:
            # Configuration phase
            appbuild.tidy_folder = False
            appbuild.language = language
            appbuild.offline_help = offline
            appbuild.portable = portable
            appbuild.updatable = updatable
            if repo_path == '.':
                repo_path = os.getcwd()
            appbuild.storage_path = repo_path
            appbuild.download_path = download_path

            if sign:
                appbuild.sign = True

            # Running phase
            appbuild.calculate()

            if check:
                appbuild.check()

            appbuild.download()
            appbuild.appbuild()
            appbuild.checksums()
            appbuild.publish()
            appbuild.generalize_and_link()
            del appbuild
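The reworked getversion command can be exercised without installing the console script by driving click's test runner. A sketch under the assumption that the command group above is importable as loaih.script (the module path is not named in these hunks); the query string is only an example:

# Sketch: invoke the new getversion command through click's test runner.
# 'loaih.script' is an assumed module path for the CLI shown above.
from click.testing import CliRunner
from loaih.script import getversion

runner = CliRunner()
result = runner.invoke(getversion, ['--default-to-current', '--json', 'fresh,still'])
print(result.exit_code)
print(result.output)   # JSON list built from the Version.to_dict() entries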
@@ -1,96 +0,0 @@
(entire file removed; presumably loaih/solvers.py, the module previously imported as loaih.solvers, whose draft solvers are superseded by the classes added above)

#!/usr/bin/env python
# encoding: utf-8
"""Solvers for the queries."""

import datetime
import requests
from lxml import html

class Solver():
    """Helps solving the queries to collections of versions."""

    @staticmethod
    def parse_query(query: Query):
        """Returns a list of versions for the query."""

        retval = []
        if query.type == 'daily':
            solver = DailySolver(query.text)
            retval.append(solver.to_version())
        elif query.type == 'named':
            solver = NamedSolver(query.text)
        else:
            solver = NumberedSolver(query.text)

        return retval


class DailySolver():
    def __init__(self, query, default_to_current = False):
        self.query = query
        self.default_to_current = default_to_current
        self.version = ''
        self.url = ''
        self.__get_url__()
        self.__get_version__()

    def __get_url__(self):
        """Get daily urls based on query."""
        # The base URL for daily releases is already determined. Let's define
        # the definitive one.

        if self.query != 'current':
            baseurl = requests.get(Definitions.DAILY)
            pageobj = html.fromstring(baseurl.content)
            xpath_string = "//td/a[starts-with(text(), 'Linux-rpm_deb-x86') and contains(text(), 'TDF/')]/text()"
            tinderbox_segment = str(pageobj.xpath(xpath_string)[-1])
            baseurl= f"{Definitions.DAILY}{tinderbox_segment}"

            # Reiterate now to search for the dated version
            base_page = requests.get(baseurl)
            baseobj = html.fromstring(base_page.content)
            daily_set = baseobj.xpath("//td/a/text()")

            searchdate = datetime.datetime.today()
            if self.query == 'yesterday':
                searchdate = searchdate + datetime.timedelta(days=-1)
            else:
                searchdate = datetime.datetime.strptime(self.query, '%Y%m%d')

            search_results = [ x for x in daily_set if searchdate.strftime('%Y-%m-%d') in x ][-1]

            if len(search_results) < 1:
                # Searched date do not exist. if default_to_current is set,
                # let's re-run the solver with 'current' query.
                if self.default_to_current:
                    current = DailySolver('current')
                    current.query = self.query
                    self.url = current.url

            else:
                # We'll presume there will be just one result anyways.
                self.url = f"{baseurl}{search_results[-1]}"

        else:
            # Current.
            current_page = requests.get(f"{Definitions.DAILY}current.html")
            current_obj = html.fromstring(current_page.content)
            xpath_string = "//td/a[contains(@href, 'Linux-rpm_deb-x86') and contains(@href, 'TDF/') and contains(@href, 'deb.tar')]/@href"
            current_link = str(current_obj.xpath(xpath_string)[-1])

            split_link = str(current_link).split('/')
            return '/'.join(split_link[1:-1]) + '/'

    def __get_version__(self):
        page = requests.get(self.baseurl)
        obj = html.fromstring(page.content)
        xpath_string = "//td/a[contains(text(), '_deb.tar.gz')]/text()"
        link = str(obj.xpath(xpath_string)[-1])
        self.version = link.split('/')[-1].split('_')[1]

    def to_version(self):
        version = Version()
        version.query = self.query
        version.version = self.version
        version.urls['x86_64'] = self.url
        return version