Compare commits
72 Commits
Author | SHA1 | Date |
---|---|---|
emiliano.vavassori | ad1ef60947 | |
emiliano.vavassori | 8a9541c4f6 | |
gabriele.ponzo | 2f31897cd9 | |
emiliano.vavassori | eff3bc67ee | |
emiliano.vavassori | 03fc28a1bf | |
emiliano.vavassori | 8c4bc65184 | |
emiliano.vavassori | 5d6c7f2df2 | |
emiliano.vavassori | 608eeb6071 | |
emiliano.vavassori | 2a617b1824 | |
emiliano.vavassori | 36da47b0b5 | |
emiliano.vavassori | 700d1eb376 | |
emiliano.vavassori | 3b01fc3b05 | |
emiliano.vavassori | 2277523f3c | |
emiliano.vavassori | 3dbfef9fbe | |
emiliano.vavassori | 1a24f54d89 | |
emiliano.vavassori | 024535afa9 | |
emiliano.vavassori | d9775f4f94 | |
emiliano.vavassori | 28528aa063 | |
emiliano.vavassori | 142a09df14 | |
emiliano.vavassori | 71a81b6a8e | |
emiliano.vavassori | bb1e73fd6c | |
emiliano.vavassori | a74b8d4858 | |
emiliano.vavassori | 8bd23dd08b | |
emiliano.vavassori | 7fe48c297d | |
emiliano.vavassori | 82e366c5da | |
emiliano.vavassori | 49e0ab5593 | |
emiliano.vavassori | 62248d862c | |
Emiliano Vavassori | 0dc3c97758 | |
Emiliano Vavassori | 8bc7290a6f | |
Emiliano Vavassori | 2fec2a2de6 | |
Emiliano Vavassori | bb002e88dc | |
Emiliano Vavassori | ad6d85e423 | |
Emiliano Vavassori | 0215324bba | |
Emiliano Vavassori | 47b8e0cf2a | |
Emiliano Vavassori | b087e85ec5 | |
Emiliano Vavassori | 3a9f13594c | |
Emiliano Vavassori | 1f83db6105 | |
Emiliano Vavassori | 8b5c87f801 | |
Emiliano Vavassori | 0bea7a81bc | |
Emiliano Vavassori | 78a43350ed | |
Emiliano Vavassori | 8c3e649a25 | |
Emiliano Vavassori | db01651251 | |
Emiliano Vavassori | 64effab3d7 | |
Emiliano Vavassori | 0a3f475fa6 | |
Emiliano Vavassori | 03620cf013 | |
Emiliano Vavassori | c015aeea99 | |
Emiliano Vavassori | 0a18586201 | |
Emiliano Vavassori | 5df2c5dbdb | |
Emiliano Vavassori | 41dcbe1718 | |
Emiliano Vavassori | 2405601d2d | |
Emiliano Vavassori | dff74f0a35 | |
Emiliano Vavassori | 6bfa6a3707 | |
Emiliano Vavassori | 059518ccbf | |
Emiliano Vavassori | 7013318188 | |
Emiliano Vavassori | f6bcf610ba | |
Emiliano Vavassori | 3aab2626ed | |
Emiliano Vavassori | df079c91b5 | |
Emiliano Vavassori | a0c4fbcad0 | |
Emiliano Vavassori | a316b85afe | |
Emiliano Vavassori | 60b246c548 | |
Emiliano Vavassori | a0c6217d95 | |
Emiliano Vavassori | f95c4d4b1d | |
Emiliano Vavassori | 9d259c4aa5 | |
Emiliano Vavassori | 07a895c86c | |
Emiliano Vavassori | 05533bf5e2 | |
Emiliano Vavassori | 38a78860b0 | |
Emiliano Vavassori | da31e1655b | |
Emiliano Vavassori | ae9668554a | |
Emiliano Vavassori | 9cf3119489 | |
Emiliano Vavassori | cbaf6b2e3c | |
Emiliano Vavassori | df5012eedd | |
Emiliano Vavassori | 2c19eefa05 |
37
.drone.yml
37
.drone.yml
|
@ -1,5 +1,6 @@
|
|||
---
|
||||
kind: pipeline
|
||||
type: docker
|
||||
name: default
|
||||
|
||||
steps:
|
||||
|
@ -8,33 +9,25 @@ steps:
|
|||
commands:
|
||||
- pip install wheel
|
||||
- python setup.py bdist_wheel
|
||||
- mkdir out
|
||||
- cp dist/*.whl out/
|
||||
when:
|
||||
event: tag
|
||||
|
||||
- name: release
|
||||
image: plugins/gitea-release
|
||||
settings:
|
||||
api_key:
|
||||
from_secret: gitea-deploy
|
||||
base_url: https://git.sys42.eu/
|
||||
files: out/*.whl
|
||||
checksum:
|
||||
- md5
|
||||
from_secret: loaih-deploy
|
||||
base_url: https://git.libreitalia.org
|
||||
files: dist/*.whl
|
||||
checksum: md5
|
||||
draft: true
|
||||
when:
|
||||
event: tag
|
||||
|
||||
- name: handycopy
|
||||
image: drillster/drone-rsync
|
||||
- name: publish
|
||||
image: plugins/pypi
|
||||
settings:
|
||||
hosts: deimos.sys42.eu
|
||||
user: syntaxerrormmm
|
||||
port: 45454
|
||||
key:
|
||||
from_secret: fisso-ssh-key
|
||||
source: out/*.whl
|
||||
target: ~/
|
||||
when:
|
||||
event: tag
|
||||
username: __token__
|
||||
password:
|
||||
from_secret: pypi
|
||||
|
||||
trigger:
|
||||
event:
|
||||
- tag
|
||||
- push
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
venv
|
||||
test
|
||||
build
|
||||
dist
|
||||
loaih.egg-info
|
||||
|
|
|
@ -0,0 +1,8 @@
|
|||
# vim:sts=4:sw=4
|
||||
FROM python:3.9-slim-bullseye
|
||||
|
||||
RUN mkdir /build && \
|
||||
pip install loaih
|
||||
WORKDIR /build
|
||||
ENTRYPOINT [ "/usr/local/bin/loaih" ]
|
||||
CMD [ "--help" ]
|
|
@ -0,0 +1,18 @@
|
|||
# LibreOffice AppImage Helper - `loaih` #
|
||||
|
||||
LibreOffice AppImage Helper is an enhanced Python porting from [previous work
|
||||
by Antonio Faccioli](https://github.com/antoniofaccioli/libreoffice-appimage).
|
||||
It helps building a LibreOffice AppImage from officially released .deb files
|
||||
with some options.
|
||||
|
||||
## Getting options and help ##
|
||||
|
||||
You can ask the app some information on how you can use it:
|
||||
|
||||
$ loaih --help
|
||||
$ loaih getversion --help
|
||||
$ loaih build --help
|
||||
$ loaih batch --help
|
||||
|
||||
For any other information needed, please visit the [wiki of the
|
||||
project](https://git.libreitalia.org/libreitalia/loaih/wiki).
|
|
@ -11,10 +11,16 @@ if [[ ${retval} -ne 0 ]]; then
|
|||
# for the sake of consistency, let's make the check_updates.sh script
|
||||
# executable
|
||||
chmod +x check_updates.sh
|
||||
if [[ -d venv ]]; then
|
||||
source venv/bin/activate
|
||||
fi
|
||||
pip3 uninstall -y loaih
|
||||
# build the actual toolkit
|
||||
python3 setup.py bdist_wheel
|
||||
pip3 install dist/*.whl; rv=$?
|
||||
if [[ -d venv ]]; then
|
||||
deactivate
|
||||
fi
|
||||
|
||||
if [[ ${rv} -eq 0 ]]; then
|
||||
# cleanup
|
||||
|
|
74
daily.yml
74
daily.yml
|
@ -1,67 +1,69 @@
|
|||
---
|
||||
data:
|
||||
repo: /mnt/appimage
|
||||
repo: https://appimages.libreitalia.org
|
||||
remote_host: ciccio
|
||||
remote_path: /var/lib/nethserver/vhost/appimages
|
||||
download: /var/tmp/downloads
|
||||
force: no
|
||||
sign: yes
|
||||
force: false
|
||||
sign: true
|
||||
|
||||
builds:
|
||||
- query: daily
|
||||
language: basic
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: daily
|
||||
language: basic
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: daily
|
||||
language: basic
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: daily
|
||||
language: basic
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
||||
- query: daily
|
||||
language: standard
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: daily
|
||||
language: standard
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: daily
|
||||
language: standard
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: daily
|
||||
language: standard
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
||||
- query: daily
|
||||
language: full
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: daily
|
||||
language: full
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: daily
|
||||
language: full
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: daily
|
||||
language: full
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
|
74
fresh.yml
74
fresh.yml
|
@ -1,67 +1,69 @@
|
|||
---
|
||||
data:
|
||||
repo: /mnt/appimage
|
||||
repo: https://appimages.libreitalia.org
|
||||
remote_host: ciccio
|
||||
remote_path: /var/lib/nethserver/vhost/appimages
|
||||
download: /var/tmp/downloads
|
||||
force: no
|
||||
sign: yes
|
||||
force: false
|
||||
sign: true
|
||||
|
||||
builds:
|
||||
- query: fresh
|
||||
language: basic
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: fresh
|
||||
language: basic
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: fresh
|
||||
language: basic
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: fresh
|
||||
language: basic
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
||||
- query: fresh
|
||||
language: standard
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: fresh
|
||||
language: standard
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: fresh
|
||||
language: standard
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: fresh
|
||||
language: standard
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
||||
- query: fresh
|
||||
language: full
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: fresh
|
||||
language: full
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: fresh
|
||||
language: full
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: fresh
|
||||
language: full
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
|
|
@ -1,213 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import urllib.request
|
||||
from lxml import etree
|
||||
from packaging.version import parse as parse_version
|
||||
import datetime
|
||||
|
||||
class Definitions(object):
|
||||
DOWNLOADPAGE = "https://www.libreoffice.org/download/download/"
|
||||
ARCHIVE = "https://downloadarchive.documentfoundation.org/libreoffice/old/"
|
||||
RELEASE = "https://download.documentfoundation.org/libreoffice/stable/"
|
||||
DAILY = "https://dev-builds.libreoffice.org/daily/master/Linux-rpm_deb-x86_64@tb87-TDF/"
|
||||
PRERELEASE = "https://dev-builds.libreoffice.org/pre-releases/deb/x86_64/"
|
||||
|
||||
SELECTORS = {
|
||||
'still': {
|
||||
'URL': DOWNLOADPAGE,
|
||||
'xpath': '(//span[@class="dl_version_number"])[last()]/text()'
|
||||
},
|
||||
'fresh': {
|
||||
'URL': DOWNLOADPAGE,
|
||||
'xpath': '(//span[@class="dl_version_number"])[1]/text()'
|
||||
},
|
||||
'prerelease': {
|
||||
'URL': DOWNLOADPAGE,
|
||||
'xpath': '//p[@class="lead_libre"][last()]/following-sibling::ul[last()]/li/a/text()'
|
||||
},
|
||||
'daily': {
|
||||
'URL': DAILY,
|
||||
'xpath': '//td/a'
|
||||
}
|
||||
}
|
||||
|
||||
class Base(object):
|
||||
# Class for static methods which might be useful even outside the build
|
||||
# scripts.
|
||||
|
||||
@staticmethod
|
||||
def dailyurl(date = datetime.datetime.today()):
|
||||
"""Returns the URL for the latest valid daily build."""
|
||||
# As per other parts of the build, we need to maintain an URL also for
|
||||
# x86 versions that it isn't really provided.
|
||||
# As such, the return value must be a dictionary
|
||||
|
||||
# Get the anchor for today's builds
|
||||
a = etree.HTML(urllib.request.urlopen(Definitions.DAILY).read()).xpath("//td/a[contains(text(), '" + date.strftime('%Y-%m-%d') + "')]/text()")
|
||||
if len(a) == 0:
|
||||
# No results found, no version found, let's return a
|
||||
return { 'x86': '-', 'x86_64': '-' }
|
||||
|
||||
# On the contrary, more than a version is found. let's order the
|
||||
# list and get the latest item
|
||||
return { 'x86': '-', 'x86_64': Definitions.SELECTORS['daily']['URL'] + sorted(a)[-1] }
|
||||
|
||||
@staticmethod
|
||||
def dailyver(date = datetime.datetime.today()):
|
||||
"""Returns versions present on the latest daily build."""
|
||||
url = Base.dailyurl(date)['x86_64']
|
||||
# If no daily releases has been provided yet, return empty
|
||||
if url == '-':
|
||||
return []
|
||||
|
||||
# Rerun the page parsing, this time to find out the versions built
|
||||
b = etree.HTML(urllib.request.urlopen(url).read()).xpath("//td/a[contains(text(), '_deb.tar.gz')]/text()")
|
||||
# This should have returned the main package for a version, but can
|
||||
# have returned multiple ones, so let's treat it as a list
|
||||
return [ x.split('_')[1] for x in b ]
|
||||
|
||||
@staticmethod
|
||||
def namedver(query):
|
||||
"""Gets the version for a specific named version."""
|
||||
|
||||
if query == 'daily' or query == 'yesterday':
|
||||
# Daily needs double parsing for the same result to apply.
|
||||
# We first select today's build anchor:
|
||||
date = datetime.datetime.today()
|
||||
if query == 'yesterday':
|
||||
# Use yesterdays' date for testing purposes.
|
||||
date += datetime.timedelta(days=-1)
|
||||
return Base.dailyver(date)
|
||||
|
||||
# In case the query isn't for daily
|
||||
return etree.HTML(urllib.request.urlopen(Definitions.SELECTORS[query]['URL']).read()).xpath(Definitions.SELECTORS[query]['xpath'])
|
||||
|
||||
@staticmethod
|
||||
def fullversion(version):
|
||||
"""Get latest full version from Archive based on partial version."""
|
||||
versionlist = etree.HTML(urllib.request.urlopen(Definitions.ARCHIVE).read()).xpath(f"//td/a[starts-with(text(), '{version}')]/text()")
|
||||
if versionlist:
|
||||
cleanlist = sorted([ x.strip('/') for x in versionlist ])
|
||||
|
||||
# Sorting, then returning the last version
|
||||
return cleanlist[-1]
|
||||
|
||||
return None
|
||||
|
||||
@staticmethod
|
||||
def urlfromqueryandver(query, version):
|
||||
"""Returns the fetching URL based on the queried version and the numeric version of it."""
|
||||
# This has the purpose to simplify and explain how the releases are
|
||||
# layed out.
|
||||
|
||||
# If the query tells about daily or 'yesterday' (for testing purposes),
|
||||
# we might ignore versions and return the value coming from dailyurl:
|
||||
if query == 'daily':
|
||||
return Base.dailyurl()
|
||||
if query == 'yesterday':
|
||||
date = datetime.datetime.today() + datetime.timedelta(days=-1)
|
||||
return Base.dailyurl(date)
|
||||
|
||||
# All other versions will be taken from Archive, as such we need a full
|
||||
# version.
|
||||
|
||||
# If the version has only 2 points in it (or splits into three parts by '.'), that's not a full version and we will call the getlatestver() function
|
||||
fullversion = str(version)
|
||||
if len(fullversion.split('.')) <= 3:
|
||||
fullversion = str(Base.fullversion(version))
|
||||
|
||||
# So the final URL is the Archive one, plus the full versions, plus a
|
||||
# final '/deb/' - and an arch subfolder
|
||||
baseurl = Definitions.ARCHIVE + fullversion + '/deb/'
|
||||
retval = {}
|
||||
|
||||
# x86 binaries are not anymore offered after 6.3.0.
|
||||
if parse_version(fullversion) < parse_version('6.3.0'):
|
||||
retval['x86'] = baseurl + 'x86/'
|
||||
else:
|
||||
retval['x86'] = '-'
|
||||
|
||||
retval['x86_64'] = baseurl + 'x86_64/'
|
||||
|
||||
return retval
|
||||
|
||||
@staticmethod
|
||||
def collectedbuilds(query):
|
||||
"""Creates a list of Builds based on each queried version found."""
|
||||
retval = []
|
||||
if '.' in query:
|
||||
# Called with a numeric query. Pass it to RemoteBuild
|
||||
retval.append(RemoteBuild(query))
|
||||
else:
|
||||
# Named query
|
||||
a = Base.namedver(query)
|
||||
|
||||
if not a:
|
||||
# a is empty
|
||||
return retval
|
||||
|
||||
if isinstance(a, list) and len(a) > 1:
|
||||
retval.extend([ RemoteBuild(query, version) for version in a ])
|
||||
else:
|
||||
retval.append(RemoteBuild(query))
|
||||
|
||||
return sorted(retval, key=lambda x: x.version)
|
||||
|
||||
|
||||
class RemoteBuild(object):
|
||||
|
||||
def __init__(self, query, version = None):
|
||||
"""Should simplify the single builded version."""
|
||||
self.query = query
|
||||
self.version = ''
|
||||
self.basedirurl = { 'x86': '-', 'x86_64': '-' }
|
||||
|
||||
if version and isinstance(version, str):
|
||||
self.version = version
|
||||
|
||||
if not '.' in self.query:
|
||||
# Named version.
|
||||
# Let's check if a specific version was requested.
|
||||
if self.version == '':
|
||||
# In case it was not requested, we will carry on the generic
|
||||
# namedver() query.
|
||||
# If the results are more than one, we'll take the latest (since we are requested to provide a single build).
|
||||
a = Base.namedver(self.query)
|
||||
|
||||
if isinstance(a, list):
|
||||
# if the number of versions is zero, return and exit
|
||||
if not a:
|
||||
return None
|
||||
|
||||
if len(a) == 1:
|
||||
# version is a single one.
|
||||
self.version = a[0]
|
||||
else:
|
||||
# In this case, we will select the latest release.
|
||||
self.version = sorted(a)[-1]
|
||||
|
||||
# If the version has already a version, as requested by user,
|
||||
# continue using that version
|
||||
else:
|
||||
# In case of numbered queries, put it as initial version
|
||||
self.version = self.query
|
||||
|
||||
if len(str(self.version).split('.')) < 4:
|
||||
# If not 4 dotted, let's search for the 4 dotted version
|
||||
self.version = Base.fullversion(self.version)
|
||||
|
||||
self.basedirurl = Base.urlfromqueryandver(self.query, self.version)
|
||||
|
||||
def todict(self):
|
||||
return {
|
||||
'query': self.query,
|
||||
'version': self.version,
|
||||
'basedirurl': self.basedirurl
|
||||
}
|
||||
|
||||
def __str__(self):
|
||||
return f"""query: {self.query}
|
||||
version: {self.version}
|
||||
x86: {self.basedirurl['x86']}
|
||||
x86_64: {self.basedirurl['x86_64']}"""
|
372
loaih/build.py
372
loaih/build.py
|
@ -1,372 +0,0 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import urllib.request
|
||||
import loaih
|
||||
from lxml import etree
|
||||
import tempfile, os, sys, glob, subprocess, shutil, re, shlex
|
||||
|
||||
class Collection(list):
|
||||
|
||||
def __init__(self, query, arch = ['x86', 'x86_64']):
|
||||
"""Build a list of version to check/build for this round."""
|
||||
super().__init__()
|
||||
self.extend([ Build(query, arch, version) for version in loaih.Base.collectedbuilds(query) ])
|
||||
|
||||
class Build(loaih.RemoteBuild):
|
||||
LANGSTD = [ 'ar', 'de', 'en-GB', 'es', 'fr', 'it', 'ja', 'ko', 'pt', 'pt-BR', 'ru', 'zh-CN', 'zh-TW' ]
|
||||
LANGBASIC = [ 'en-GB' ]
|
||||
ARCHSTD = [ u'x86', u'x86_64' ]
|
||||
|
||||
def __init__(self, query, arch, version = None):
|
||||
super().__init__(query, version)
|
||||
self.arch = arch
|
||||
self.short_version = str.join('.', self.version.split('.')[0:2])
|
||||
self.branch_version = None
|
||||
if not '.' in self.query:
|
||||
self.branch_version = self.query
|
||||
self.url = self.basedirurl
|
||||
|
||||
# Other default values
|
||||
self.language = 'basic'
|
||||
self.offline_help = False
|
||||
self.portable = False
|
||||
self.updatable = True
|
||||
self.sign = True
|
||||
self.storage_path = '/mnt/appimage'
|
||||
self.download_path = '/var/tmp/downloads'
|
||||
|
||||
# Specific build version
|
||||
self.appversion = ''
|
||||
self.appimagefilename = {}
|
||||
self.zsyncfilename = {}
|
||||
|
||||
# Creating a tempfile
|
||||
self.builddir = tempfile.mkdtemp()
|
||||
self.tarballs = {}
|
||||
self.built = { u'x86': False, u'x86_64': False }
|
||||
|
||||
# Preparing the default for the relative path on the storage for
|
||||
# different versions.
|
||||
# The path will evaluated as part of the check() function, as it is
|
||||
# understood the storage_path can be changed before that phase.
|
||||
self.relative_path = []
|
||||
self.full_path = ''
|
||||
self.baseurl = ''
|
||||
|
||||
def calculate(self):
|
||||
"""Calculate exclusions and other variables."""
|
||||
# AppName
|
||||
self.appname = 'LibreOffice' if not self.query == 'daily' and not self.query == 'prerelease' else 'LibreOfficeDev'
|
||||
|
||||
# Calculating languagepart
|
||||
self.languagepart = "."
|
||||
if ',' in self.language:
|
||||
self.languagepart += self.language.replace(',', '-')
|
||||
else:
|
||||
self.languagepart += self.language
|
||||
|
||||
# Calculating help part
|
||||
self.helppart = '.help' if self.offline_help else ''
|
||||
|
||||
# Building the required names
|
||||
for arch in Build.ARCHSTD:
|
||||
self.appimagefilename[arch] = self.__gen_appimagefilename__(self.version, arch)
|
||||
self.zsyncfilename[arch] = self.appimagefilename[arch] + '.zsync'
|
||||
|
||||
# Mandate to the private function to calculate the full_path available
|
||||
# for the storage and the checks.
|
||||
self.__calculate_full_path__()
|
||||
|
||||
|
||||
def __gen_appimagefilename__(self, version, arch):
|
||||
"""Generalize the construction of the name of the app."""
|
||||
self.appversion = version + self.languagepart + self.helppart
|
||||
return self.appname + f'-{self.appversion}-{arch}.AppImage'
|
||||
|
||||
|
||||
def __calculate_full_path__(self):
|
||||
"""Calculate relative path of the build, based on internal other variables."""
|
||||
if len(self.relative_path) == 0:
|
||||
if self.query == 'daily':
|
||||
self.relative_path.append('daily')
|
||||
elif self.query == 'prerelease':
|
||||
self.relative_path.append('prerelease')
|
||||
|
||||
# Not the same check, an additional one
|
||||
if self.portable:
|
||||
self.relative_path.append('portable')
|
||||
|
||||
fullpath_arr = self.storage_path.split('/')
|
||||
# Joining relative path only if it is not null
|
||||
if len(self.relative_path) > 0:
|
||||
fullpath_arr.extend(self.relative_path)
|
||||
self.full_path = re.sub(r"/+", '/', str.join('/', fullpath_arr))
|
||||
|
||||
|
||||
def check(self):
|
||||
"""Checking if the requested AppImage has been already built."""
|
||||
if not len(self.appimagefilename) == 2:
|
||||
self.calculate()
|
||||
|
||||
for arch in self.arch:
|
||||
print(f"Searching for {self.appimagefilename[arch]}")
|
||||
res = subprocess.run(shlex.split(f"find {self.full_path} -name {self.appimagefilename[arch]}"), capture_output=True, env={ "LC_ALL": "C" }, text=True, encoding='utf-8')
|
||||
|
||||
if "No such file or directory" in res.stderr:
|
||||
# Folder is not existent: so the version was not built
|
||||
# Build stays false, and we go to the next arch
|
||||
continue
|
||||
|
||||
if res.stdout and len(res.stdout.strip("\n")) > 0:
|
||||
# All good, the command was executed fine.
|
||||
print(f"Build for {self.version} found.")
|
||||
self.built[arch] = True
|
||||
|
||||
if self.built[arch]:
|
||||
print(f"The requested AppImage already exists on storage for {arch}. I'll skip downloading, building and moving the results.")
|
||||
|
||||
|
||||
def download(self):
|
||||
"""Downloads the contents of the URL as it was a folder."""
|
||||
print(f"Started downloads for {self.version}. Please wait.")
|
||||
for arch in self.arch:
|
||||
# Checking if a valid path has been provided
|
||||
if self.url[arch] == '-':
|
||||
print(f"No build has been provided for the requested AppImage for {arch}. Continue with other options.")
|
||||
# Faking already built it so to skip other checks.
|
||||
self.built[arch] = True
|
||||
continue
|
||||
|
||||
if self.built[arch]:
|
||||
print(f"A build for {arch} was already found. Skipping specific packages.")
|
||||
continue
|
||||
|
||||
# Identifying downloads
|
||||
contents = etree.HTML(urllib.request.urlopen(self.url[arch]).read()).xpath("//td/a")
|
||||
self.tarballs[arch] = [ x.text for x in contents if x.text.endswith('tar.gz') and 'deb' in x.text ]
|
||||
tarballs = self.tarballs[arch]
|
||||
maintarball = tarballs[0]
|
||||
|
||||
# Create and change directory to the download location
|
||||
os.makedirs(self.download_path, exist_ok = True)
|
||||
os.chdir(self.download_path)
|
||||
for archive in tarballs:
|
||||
# If the archive is already there, do not do anything.
|
||||
if os.path.exists(archive):
|
||||
continue
|
||||
|
||||
# Download the archive
|
||||
try:
|
||||
urllib.request.urlretrieve(self.url[arch] + archive, archive)
|
||||
except:
|
||||
print(f"Failed to download {archive}.")
|
||||
|
||||
print(f"Finished downloads for {self.version}.")
|
||||
|
||||
def build(self):
|
||||
"""Building all the versions."""
|
||||
|
||||
for arch in self.arch:
|
||||
if self.built[arch]:
|
||||
# Already built for arch or path not available. User has already been warned.
|
||||
continue
|
||||
|
||||
# Preparation tasks
|
||||
self.appnamedir = os.path.join(self.builddir, self.appname)
|
||||
os.makedirs(self.appnamedir, exist_ok=True)
|
||||
# And then cd to the appname folder.
|
||||
os.chdir(self.appnamedir)
|
||||
# Download appimagetool from github
|
||||
appimagetoolurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-{arch}.AppImage"
|
||||
urllib.request.urlretrieve(appimagetoolurl, 'appimagetool')
|
||||
os.chmod('appimagetool', 0o755)
|
||||
|
||||
# Build the requested version.
|
||||
self.__unpackbuild__(arch)
|
||||
|
||||
|
||||
def __unpackbuild__(self, arch):
|
||||
# We start by filtering out tarballs from the list
|
||||
buildtarballs = [ self.tarballs[arch][0] ]
|
||||
|
||||
# Let's process standard languages and append results to the
|
||||
# buildtarball
|
||||
if self.language == 'basic':
|
||||
if self.offline_help:
|
||||
buildtarballs.extend([ x for x in self.tarballs[arch] if 'pack_en-GB' in x ])
|
||||
else:
|
||||
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack_en-GB' in x])
|
||||
elif self.language == 'standard':
|
||||
for lang in Build.LANGSTD:
|
||||
if self.offline_help:
|
||||
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack_' + lang) in x ])
|
||||
else:
|
||||
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack_' + lang) in x ])
|
||||
elif self.language == 'full':
|
||||
if self.offline_help:
|
||||
# We need also all help. Let's replace buildtarball with the
|
||||
# whole bunch
|
||||
buildtarballs = self.tarballs[arch]
|
||||
else:
|
||||
buildtarballs.extend([ x for x in self.tarballs[arch] if 'langpack' in x ])
|
||||
else:
|
||||
# Looping for each language in self.language
|
||||
for lang in self.language.split(","):
|
||||
if self.offline_help:
|
||||
buildtarballs.extend([ x for x in self.tarballs[arch] if ('pack' + lang) in x ])
|
||||
else:
|
||||
buildtarballs.extend([ x for x in self.tarballs[arch] if ('langpack' + lang) in x ])
|
||||
|
||||
os.chdir(self.appnamedir)
|
||||
|
||||
# Unpacking the tarballs
|
||||
for archive in buildtarballs:
|
||||
subprocess.run(shlex.split(f"tar xzf {self.download_path}/{archive}"))
|
||||
|
||||
# create appimagedir
|
||||
self.appimagedir = os.path.join(self.builddir, self.appname, self.appname + '.AppDir')
|
||||
os.makedirs(self.appimagedir, exist_ok = True)
|
||||
|
||||
# At this point, let's decompress the deb packages
|
||||
subprocess.run(shlex.split("find .. -iname '*.deb' -exec dpkg -x {} . \;"), cwd=self.appimagedir)
|
||||
|
||||
if self.portable:
|
||||
subprocess.run(shlex.split("find . -type f -iname 'bootstraprc' -exec sed -i 's|^UserInstallation=.*|UserInstallation=\$SYSUSERCONFIG/libreoffice/%s|g' {} \+" % self.short_version), cwd=self.appimagedir)
|
||||
|
||||
# Changing desktop file
|
||||
subprocess.run(shlex.split("find . -iname startcenter.desktop -exec cp {} . \;"), cwd=self.appimagedir)
|
||||
subprocess.run(shlex.split("sed --in-place 's:^Name=.*$:Name=%s:' startcenter.desktop > startcenter.desktop" % self.appname), cwd=self.appimagedir)
|
||||
|
||||
subprocess.run(shlex.split("find . -name '*startcenter.png' -path '*hicolor*48x48*' -exec cp {} . \;"), cwd=self.appimagedir)
|
||||
|
||||
# Find the name of the binary called in the desktop file.
|
||||
binaryname = ''
|
||||
with open(os.path.join(self.appimagedir, 'startcenter.desktop'), 'r') as d:
|
||||
a = d.readlines()
|
||||
for line in a:
|
||||
if re.match(r'^Exec', line):
|
||||
binaryname = line.split('=')[-1].split(' ')[0]
|
||||
# Esci al primo match
|
||||
break
|
||||
#binary_exec = subprocess.run(shlex.split(r"awk 'BEGIN { FS = \"=\" } /^Exec/ { print $2; exit }' startcenter.desktop | awk '{ print $1 }'"), cwd=self.appimagedir, text=True, encoding='utf-8')
|
||||
#binaryname = binary_exec.stdout.strip("\n")
|
||||
|
||||
bindir=os.path.join(self.appimagedir, 'usr', 'bin')
|
||||
os.makedirs(bindir, exist_ok = True)
|
||||
subprocess.run(shlex.split("find ../../opt -iname soffice -path '*program*' -exec ln -sf {} ./%s \;" % binaryname), cwd=bindir)
|
||||
|
||||
# Download AppRun from github
|
||||
apprunurl = f"https://github.com/AppImage/AppImageKit/releases/download/continuous/AppRun-{arch}"
|
||||
dest = os.path.join(self.appimagedir, 'AppRun')
|
||||
urllib.request.urlretrieve(apprunurl, dest)
|
||||
os.chmod(dest, 0o755)
|
||||
|
||||
# Dealing with extra options
|
||||
buildopts = []
|
||||
if self.sign:
|
||||
buildopts.append('--sign')
|
||||
|
||||
# adding zsync build if updatable
|
||||
if self.updatable:
|
||||
buildopts.append(f"-u 'zsync|{self.zsyncfilename[arch]}'")
|
||||
|
||||
buildopts_str = str.join(' ', buildopts)
|
||||
# Build the number-specific build
|
||||
subprocess.run(shlex.split(f"{self.appnamedir}/appimagetool {buildopts_str} -v ./{self.appname}.AppDir/"), env={ "VERSION": self.appversion })
|
||||
|
||||
print(f"Built AppImage version {self.appversion}")
|
||||
|
||||
# Cleanup phase, before new run.
|
||||
for deb in glob.glob(self.appnamedir + '/*.deb'):
|
||||
os.remove(deb)
|
||||
subprocess.run(shlex.split("find . -mindepth 1 -maxdepth 1 -type d -exec rm -rf {} \+"))
|
||||
|
||||
|
||||
def checksums(self):
|
||||
"""Create checksums of the built versions."""
|
||||
# Skip checksum if initally the build was already found in the storage directory
|
||||
if all(self.built.values()):
|
||||
return
|
||||
|
||||
os.chdir(self.appnamedir)
|
||||
for arch in self.arch:
|
||||
for item in [ self.appimagefilename[arch], self.zsyncfilename[arch] ]:
|
||||
# For any built arch, find out if a file exist.
|
||||
self.__create_checksum__(item)
|
||||
|
||||
|
||||
def __create_checksum__(self, file):
|
||||
"""Internal function to create checksum file."""
|
||||
checksum = subprocess.run(shlex.split(f"md5sum {file}"), capture_output=True, text=True, encoding='utf-8')
|
||||
if checksum.stdout:
|
||||
with open(f"{file}.md5", 'w') as c:
|
||||
c.write(checksum.stdout)
|
||||
|
||||
def publish(self):
|
||||
"""Moves built versions to definitive storage."""
|
||||
if all(self.built.values()):
|
||||
# All files are already present in the full_path
|
||||
return
|
||||
|
||||
os.chdir(self.appnamedir)
|
||||
# Forcing creation of subfolders, in case there is a new build
|
||||
os.makedirs(self.full_path, exist_ok = True)
|
||||
for file in glob.glob("*.AppImage*"):
|
||||
subprocess.run(shlex.split(f"cp -f {file} {self.full_path}"))
|
||||
|
||||
|
||||
def generalize_and_link(self):
|
||||
"""Creates the needed generalized files if needed."""
|
||||
# If called with a pointed version, no generalize and link necessary.
|
||||
if not self.branch_version:
|
||||
return
|
||||
|
||||
# If a prerelease or a daily version, either.
|
||||
if self.query == 'daily' or self.query == 'prerelease':
|
||||
return
|
||||
|
||||
appimagefilename = {}
|
||||
zsyncfilename = {}
|
||||
|
||||
# Creating versions for short version and query text
|
||||
versions = [ self.short_version, self.branch_version ]
|
||||
for arch in Build.ARCHSTD:
|
||||
# If already built, do not do anything.
|
||||
if self.built[arch]:
|
||||
continue
|
||||
|
||||
os.chdir(self.full_path)
|
||||
# if the appimage for the reported arch is not found, skip to next
|
||||
# arch
|
||||
if not os.path.exists(self.appimagefilename[arch]):
|
||||
continue
|
||||
|
||||
# Doing it both for short_name and for branchname
|
||||
for version in versions:
|
||||
appimagefilename[arch] = self.appname + '-' + version + self.languagepart + self.helppart + f'-{arch}.AppImage'
|
||||
zsyncfilename[arch] = appimagefilename[arch] + '.zsync'
|
||||
|
||||
# Create the symlink
|
||||
print(f"Creating {appimagefilename[arch]} and checksums.")
|
||||
if os.path.exists(appimagefilename[arch]):
|
||||
os.unlink(appimagefilename[arch])
|
||||
os.symlink(self.appimagefilename[arch], appimagefilename[arch])
|
||||
# Create the checksum for the AppImage
|
||||
self.__create_checksum__(appimagefilename[arch])
|
||||
# Do not continue if no zsync file is provided.
|
||||
if not self.updatable:
|
||||
continue
|
||||
|
||||
print(f"Creating zsync file for version {version}.")
|
||||
if os.path.exists(zsyncfilename[arch]):
|
||||
os.unlink(zsyncfilename[arch])
|
||||
shutil.copyfile(self.zsyncfilename[arch], zsyncfilename[arch])
|
||||
# Editing the zsyncfile
|
||||
subprocess.run(shlex.split(f"sed --in-place 's/^Filename:.*$/Filename: {appimagefilename[arch]}/' {zsyncfilename[arch]}"))
|
||||
self.__create_checksum__(zsyncfilename[arch])
|
||||
|
||||
|
||||
def __del__(self):
    """Destructor: best-effort removal of the temporary build directory.

    ``__del__`` may run during interpreter shutdown or on a partially
    initialized object (``__init__`` failed before setting ``builddir``),
    and the directory may already have been removed. Any such error is
    deliberately swallowed: raising from a destructor only produces an
    unraisable-exception warning and there is nothing sensible to do
    about a failed cleanup at this point.
    """
    try:
        shutil.rmtree(self.builddir)
    except (AttributeError, OSError):
        # Attribute never set, directory already gone, or not removable.
        pass
|
115
loaih/script.py
115
loaih/script.py
|
@ -1,115 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||
|
||||
import click
|
||||
import yaml
|
||||
import loaih, loaih.build
|
||||
import re, sys, json
|
||||
|
||||
@click.group()
def cli():
    """Top-level command group for the loaih command-line interface."""
    pass
|
||||
|
||||
@cli.command()
@click.option('-j', '--json', 'jsonout', default=False, is_flag=True, help="Output format in json.")
@click.argument('query')
def getversion(query, jsonout):
    """Resolve version queries and print the collected builds.

    QUERY is a single query string or several queries separated by commas
    (e.g. 'still,fresh'). Prints one build per line, or a JSON array of
    build dicts when --json is given. Prints nothing when no build matches.
    """
    # str.split(',') already returns a one-element list when there is no
    # comma, so the original single/multiple-query branches collapse into
    # one code path.
    builds = []
    for single_query in query.split(','):
        builds.extend(loaih.Base.collectedbuilds(single_query))

    if builds:
        if jsonout:
            click.echo(json.dumps([x.todict() for x in builds]))
        else:
            for build_version in builds:
                click.echo(build_version)
|
||||
|
||||
@cli.command()
@click.option('-a', '--arch', 'arch', type=click.Choice(['x86', 'x86_64', 'all'], case_sensitive=False), default='all', help="Build the AppImage for a specific architecture. If there is no specific options, the process will build for both architectures (if available). Default: all")
@click.option('-c/-C', '--check/--no-check', 'check', default=True, help="Check in the final storage if the queried version is existent. Default: check")
@click.option('-d', '--download-path', 'download_path', default = '/var/tmp/downloads', type=str, help="Path to the download folder. Default: /var/tmp/downloads")
@click.option('-l', '--language', 'language', default = 'basic', type=str, help="Languages to be included. Options: basic, standard, full, a language string (e.g. 'it') or a list of languages comma separated (e.g.: 'en-US,en-GB,it'). Default: basic")
@click.option('-o/-O', '--offline-help/--no-offline-help', 'offline', default = False, help="Include or not the offline help for the chosen languages. Default: no offline help")
@click.option('-p/-P', '--portable/--no-portable', 'portable', default = False, help="Create a portable version of the AppImage or not. Default: no portable")
@click.option('-r', '--repo-path', 'repo_path', default = '/mnt/appimage', type=str, help="Path to the final storage of the AppImage. Default: /mnt/appimage")
@click.option('-s/-S', '--sign/--no-sign', 'sign', default=True, help="Wether to sign the build. Default: sign")
@click.option('-u/-U', '--updatable/--no-updatable', 'updatable', default = True, help="Create an updatable version of the AppImage or not. Default: updatable")
@click.argument('query')
def build(arch, language, offline, portable, updatable, download_path, repo_path, check, sign, query):
    """Build AppImages for QUERY.

    QUERY is either a plain version query (built with the command-line
    options) or a path to a YAML buildfile (*.yml / *.yaml), in which case
    all command-line options are ignored and each entry under 'builds' is
    processed with the defaults from the file's 'data' section.
    """
    # Parse the architecture selection into the list of arches to build.
    if arch.lower() == 'all':
        # We need to build it twice, once per architecture.
        arches = [ u'x86', u'x86_64' ]
    else:
        arches = [ arch.lower() ]

    if query.endswith('.yml') or query.endswith('.yaml'):
        # Buildfile mode: load the YAML and drive every build from it.
        with open(query, 'r') as file:
            config = yaml.safe_load(file)

        data = config['data']
        # NOTE: loop variable renamed from 'build' — the original shadowed
        # this very command function.
        for build_spec in config['builds']:
            # Loop a run for each build entry.
            collection = loaih.build.Collection(build_spec['query'], arches)

            for obj in collection:
                # Configuration phase.
                obj.language = build_spec['language']
                obj.offline_help = build_spec['offline_help']
                obj.portable = build_spec['portable']
                obj.updatable = True
                # .get() or default: same as "key present and truthy".
                obj.storage_path = data.get('repo') or '/srv/http/appimage.sys42.eu'
                obj.download_path = data.get('download') or '/var/tmp/downloads'

                if data.get('sign'):
                    obj.sign = True

                # Build phase.
                obj.calculate()
                # Skip the existence check only when 'force' is set truthy.
                if not data.get('force'):
                    obj.check()

                obj.download()
                obj.build()
                obj.checksums()
                obj.publish()
                obj.generalize_and_link()
                del obj

    else:
        # CLI mode: every option comes from the command-line flags.
        collection = loaih.build.Collection(query, arches)
        for obj in collection:
            # Configuration phase.
            obj.language = language
            obj.offline_help = offline
            obj.portable = portable
            obj.updatable = updatable
            obj.storage_path = repo_path
            obj.download_path = download_path

            if sign:
                obj.sign = True

            # Running phase.
            obj.calculate()

            if check:
                obj.check()

            obj.download()
            obj.build()
            obj.checksums()
            obj.publish()
            obj.generalize_and_link()
            del obj
|
|
@ -1,67 +1,69 @@
|
|||
---
|
||||
data:
|
||||
repo: /mnt/appimage
|
||||
repo: https://appimages.libreitalia.org
|
||||
remote_host: ciccio
|
||||
remote_path: /var/lib/nethserver/vhost/appimages
|
||||
download: /var/tmp/downloads
|
||||
force: no
|
||||
sign: yes
|
||||
force: false
|
||||
sign: true
|
||||
|
||||
builds:
|
||||
- query: prerelease
|
||||
language: basic
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: prerelease
|
||||
language: basic
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: prerelease
|
||||
language: basic
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: prerelease
|
||||
language: basic
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
||||
- query: prerelease
|
||||
language: standard
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: prerelease
|
||||
language: standard
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: prerelease
|
||||
language: standard
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: prerelease
|
||||
language: standard
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
||||
- query: prerelease
|
||||
language: full
|
||||
offline_help: no
|
||||
portable: no
|
||||
|
||||
offline_help: false
|
||||
portable: false
|
||||
|
||||
- query: prerelease
|
||||
language: full
|
||||
offline_help: yes
|
||||
portable: no
|
||||
|
||||
offline_help: true
|
||||
portable: false
|
||||
|
||||
- query: prerelease
|
||||
language: full
|
||||
offline_help: no
|
||||
portable: yes
|
||||
|
||||
offline_help: false
|
||||
portable: true
|
||||
|
||||
- query: prerelease
|
||||
language: full
|
||||
offline_help: yes
|
||||
portable: yes
|
||||
offline_help: true
|
||||
portable: true
|
||||
|
|
|
@ -0,0 +1,54 @@
|
|||
# vim:sts=4:sw=4
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[project]
|
||||
name = "loaih"
|
||||
dynamic = ["version"]
|
||||
authors = [
|
||||
{ name = "Emiliano Vavassori", email = "syntaxerrormmm@gmail.com" },
|
||||
]
|
||||
description = "LOAIH - LibreOffice AppImage Helpers, help build a LibreOffice AppImage"
|
||||
readme = "README.md"
|
||||
license = "MIT"
|
||||
requires-python = ">= 3.6"
|
||||
dependencies = [
|
||||
"click",
|
||||
"lxml",
|
||||
"pyyaml",
|
||||
"requests",
|
||||
]
|
||||
classifiers = [
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Programming Language :: Python :: 3.6",
|
||||
"Environment :: Console",
|
||||
"Intended Audience :: Developers",
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"Intended Audience :: System Administrators",
|
||||
"License :: OSI Approved :: MIT License",
|
||||
"Natural Language :: English",
|
||||
"Operating System :: POSIX :: Linux",
|
||||
"Topic :: Office/Business",
|
||||
"Topic :: Software Development :: Build Tools",
|
||||
"Topic :: Software Development :: Quality Assurance",
|
||||
"Topic :: Software Development :: Testing",
|
||||
"Topic :: Software Development :: User Interfaces"
|
||||
]
|
||||
|
||||
[project.scripts]
|
||||
loaih = "loaih.script:cli"
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://git.libreitalia.org/LibreItalia/loaih/"
|
||||
|
||||
[tool.hatch.version]
|
||||
path = "src/loaih/version.py"
|
||||
|
||||
[tool.hatch.build.targets.sdist]
|
||||
include = [
|
||||
"src/loaih",
|
||||
]
|
22
setup.py
22
setup.py
|
@ -1,22 +0,0 @@
|
|||
#!/usr/bin/env python
# encoding: utf-8
# vim:sts=4:sw=4
# Legacy setuptools packaging script for the loaih package.

from setuptools import setup,find_packages

setup(
    # Package identity and metadata.
    name="loaih",
    version="1.2.0",
    description="LOAIH - LibreOffice AppImage Helpers, help build a LibreOffice AppImage",
    author="Emiliano Vavassori",
    author_email="syntaxerrormmm@libreoffice.org",
    # Include every discovered package except auxiliary directories.
    packages=find_packages(exclude=['contrib', 'docs', 'tests']),
    # Installs a 'loaih' console command dispatching to the click group
    # defined in loaih/script.py.
    entry_points={
        'console_scripts': [
            'loaih = loaih.script:cli',
        ],
    },
    install_requires=[ 'click', ],
    license='MIT',
    url='https://git.libreitalia.org/LibreItalia/loappimage-helpers/',
)
|
|
@ -0,0 +1,259 @@
|
|||
#!/usr/bin/env python
|
||||
# encoding: utf-8
|
||||