forked from solderpunk/AV-98
Access to xdg folders now refactored to be a function
Instead of creating three global variables, an xdg() function now returns the DATA, CONFIG and CACHE folders. This allows us to create the cache only when it is tentatively accessed (this fixes bug #27)
This commit is contained in:
parent
aad1730cd8
commit
92516082c1
|
@ -8,6 +8,7 @@
|
|||
- ansicat: avoid a crash when urllib.parse.urljoin fails
|
||||
- offpunk: Fix a crash when gus is called without parameters (Von Hohenheiden)
|
||||
- ansicat: fixed a crash when parsing wrong hidden_url in gemini (bug #32)
|
||||
- offpunk: offpunk --version doesn’t create the cache anymore (bug #27)
|
||||
|
||||
## 2.0 - November 16th 2023
|
||||
Changes since 1.10
|
||||
|
|
|
@ -14,7 +14,7 @@ import netcache
|
|||
import offthemes
|
||||
from offutils import run,term_width,is_local,looks_like_base64, looks_like_url
|
||||
import base64
|
||||
from offutils import _DATA_DIR
|
||||
from offutils import xdg
|
||||
try:
|
||||
from readability import Document
|
||||
_HAS_READABILITY = True
|
||||
|
@ -778,7 +778,7 @@ class GopherRenderer(AbstractRenderer):
|
|||
|
||||
class FolderRenderer(GemtextRenderer):
|
||||
#it was initialized with:
|
||||
#self.renderer = FolderRenderer("",self.get_cache_path(),datadir=_DATA_DIR)
|
||||
#self.renderer = FolderRenderer("",self.get_cache_path(),datadir=xdg("data"))
|
||||
def __init__(self,content,url,center=True,datadir=None):
|
||||
GemtextRenderer.__init__(self,content,url,center)
|
||||
self.datadir = datadir
|
||||
|
@ -1325,7 +1325,7 @@ def renderer_from_file(path,url=None,theme=None):
|
|||
def set_renderer(content,url,mime,theme=None):
|
||||
renderer = None
|
||||
if mime == "Local Folder":
|
||||
renderer = FolderRenderer("",url,datadir=_DATA_DIR)
|
||||
renderer = FolderRenderer("",url,datadir=xdg("data"))
|
||||
if theme:
|
||||
renderer.set_theme(theme)
|
||||
return renderer
|
||||
|
|
18
netcache.py
18
netcache.py
|
@ -14,7 +14,7 @@ import sqlite3
|
|||
from ssl import CertificateError
|
||||
import ansicat
|
||||
import offutils
|
||||
from offutils import _CACHE_PATH,_DATA_DIR,_CONFIG_DIR
|
||||
from offutils import xdg
|
||||
import time
|
||||
try:
|
||||
import chardet
|
||||
|
@ -35,10 +35,6 @@ try:
|
|||
except (ModuleNotFoundError,ImportError):
|
||||
_DO_HTTP = False
|
||||
|
||||
if not os.path.exists(_CACHE_PATH):
|
||||
print("Creating cache directory {}".format(_CACHE_PATH))
|
||||
os.makedirs(_CACHE_PATH)
|
||||
|
||||
# This list is also used as a list of supported protocols
|
||||
standard_ports = {
|
||||
"gemini" : 1965,
|
||||
|
@ -149,7 +145,7 @@ def get_cache_path(url,add_index=True):
|
|||
elif scheme == "mailto":
|
||||
path = parsed.path
|
||||
elif url.startswith("list://"):
|
||||
listdir = os.path.join(_DATA_DIR,"lists")
|
||||
listdir = os.path.join(xdg("data"),"lists")
|
||||
listname = url[7:].lstrip("/")
|
||||
if listname in [""]:
|
||||
name = "My Lists"
|
||||
|
@ -195,7 +191,7 @@ def get_cache_path(url,add_index=True):
|
|||
if local:
|
||||
cache_path = path
|
||||
elif scheme and host:
|
||||
cache_path = os.path.expanduser(_CACHE_PATH + scheme + "/" + host + path)
|
||||
cache_path = os.path.expanduser(xdg("cache") + scheme + "/" + host + path)
|
||||
#There’s an OS limitation of 260 characters per path.
|
||||
#We will thus cut the path enough to add the index afterward
|
||||
cache_path = cache_path[:249]
|
||||
|
@ -504,7 +500,7 @@ def _validate_cert(address, host, cert,accept_bad_ssl=False,automatic_choice=Non
|
|||
sha.update(cert)
|
||||
fingerprint = sha.hexdigest()
|
||||
|
||||
db_path = os.path.join(_CONFIG_DIR, "tofu.db")
|
||||
db_path = os.path.join(xdg("config"), "tofu.db")
|
||||
db_conn = sqlite3.connect(db_path)
|
||||
db_cur = db_conn.cursor()
|
||||
|
||||
|
@ -534,7 +530,7 @@ def _validate_cert(address, host, cert,accept_bad_ssl=False,automatic_choice=Non
|
|||
db_conn.commit()
|
||||
break
|
||||
else:
|
||||
certdir = os.path.join(_CONFIG_DIR, "cert_cache")
|
||||
certdir = os.path.join(xdg("config"), "cert_cache")
|
||||
with open(os.path.join(certdir, most_frequent_cert+".crt"), "rb") as fp:
|
||||
previous_cert = fp.read()
|
||||
if _HAS_CRYPTOGRAPHY:
|
||||
|
@ -577,7 +573,7 @@ def _validate_cert(address, host, cert,accept_bad_ssl=False,automatic_choice=Non
|
|||
VALUES (?, ?, ?, ?, ?, ?)""",
|
||||
(host, address, fingerprint, now, now, 1))
|
||||
db_conn.commit()
|
||||
certdir = os.path.join(_CONFIG_DIR, "cert_cache")
|
||||
certdir = os.path.join(xdg("config"), "cert_cache")
|
||||
if not os.path.exists(certdir):
|
||||
os.makedirs(certdir)
|
||||
with open(os.path.join(certdir, fingerprint+".crt"), "wb") as fp:
|
||||
|
@ -898,7 +894,7 @@ def main():
|
|||
path = get_cache_path(u)
|
||||
else:
|
||||
print("Download URL: %s" %u)
|
||||
path = fetch(u,max_size=args.max_size,timeout=args.timeout)
|
||||
path,url = fetch(u,max_size=args.max_size,timeout=args.timeout)
|
||||
if args.path:
|
||||
print(path)
|
||||
else:
|
||||
|
|
26
offpunk.py
26
offpunk.py
|
@ -27,7 +27,7 @@ import opnk
|
|||
import ansicat
|
||||
import offthemes
|
||||
from offutils import run,term_width,is_local,mode_url,unmode_url, looks_like_url
|
||||
from offutils import _CONFIG_DIR,_DATA_DIR,_CACHE_PATH
|
||||
from offutils import xdg
|
||||
import offblocklist
|
||||
try:
|
||||
import setproctitle
|
||||
|
@ -875,9 +875,9 @@ Marks are temporary until shutdown (not saved to disk)."""
|
|||
output += " - copy to/from clipboard (xsel) : " + has(_HAS_XSEL)
|
||||
output += " - restore last position (less 572+) : " + has(opnk._LESS_RESTORE_POSITION)
|
||||
output += "\n"
|
||||
output += "Config directory : " + _CONFIG_DIR + "\n"
|
||||
output += "User Data directory : " + _DATA_DIR + "\n"
|
||||
output += "Cache directoy : " + _CACHE_PATH
|
||||
output += "Config directory : " + xdg("config") + "\n"
|
||||
output += "User Data directory : " + xdg("data") + "\n"
|
||||
output += "Cache directoy : " + xdg("cache")
|
||||
|
||||
print(output)
|
||||
|
||||
|
@ -1106,9 +1106,9 @@ If no argument given, URL is added to Bookmarks."""
|
|||
def get_list(self,list):
|
||||
list_path = self.list_path(list)
|
||||
if not list_path:
|
||||
old_file_gmi = os.path.join(_CONFIG_DIR,list + ".gmi")
|
||||
old_file_nogmi = os.path.join(_CONFIG_DIR,list)
|
||||
target = os.path.join(_DATA_DIR,"lists")
|
||||
old_file_gmi = os.path.join(xdg("config"),list + ".gmi")
|
||||
old_file_nogmi = os.path.join(xdg("config"),list)
|
||||
target = os.path.join(xdg("data"),"lists")
|
||||
if os.path.exists(old_file_gmi):
|
||||
shutil.move(old_file_gmi,target)
|
||||
elif os.path.exists(old_file_nogmi):
|
||||
|
@ -1342,7 +1342,7 @@ archives, which is a special historical list limited in size. It is similar to `
|
|||
#return the path of the list file if list exists.
|
||||
#return None if the list doesn’t exist.
|
||||
def list_path(self,list):
|
||||
listdir = os.path.join(_DATA_DIR,"lists")
|
||||
listdir = os.path.join(xdg("data"),"lists")
|
||||
list_path = os.path.join(listdir, "%s.gmi"%list)
|
||||
if os.path.exists(list_path):
|
||||
return list_path
|
||||
|
@ -1354,7 +1354,7 @@ archives, which is a special historical list limited in size. It is similar to `
|
|||
if list in ["create","edit","delete","help"]:
|
||||
print("%s is not allowed as a name for a list"%list)
|
||||
elif not list_path:
|
||||
listdir = os.path.join(_DATA_DIR,"lists")
|
||||
listdir = os.path.join(xdg("data"),"lists")
|
||||
os.makedirs(listdir,exist_ok=True)
|
||||
list_path = os.path.join(listdir, "%s.gmi"%list)
|
||||
with open(list_path,"a") as lfile:
|
||||
|
@ -1392,7 +1392,7 @@ If current page was not in a list, this command is similar to `add LIST`."""
|
|||
self.list_add_line(args[0])
|
||||
|
||||
def list_lists(self):
|
||||
listdir = os.path.join(_DATA_DIR,"lists")
|
||||
listdir = os.path.join(xdg("data"),"lists")
|
||||
to_return = []
|
||||
if os.path.exists(listdir):
|
||||
lists = os.listdir(listdir)
|
||||
|
@ -1469,7 +1469,7 @@ The following lists cannot be removed or frozen but can be edited with "list edi
|
|||
- tour : contains the next URLs to visit during a tour (see "help tour")
|
||||
|
||||
"""
|
||||
listdir = os.path.join(_DATA_DIR,"lists")
|
||||
listdir = os.path.join(xdg("data"),"lists")
|
||||
os.makedirs(listdir,exist_ok=True)
|
||||
if not arg:
|
||||
lists = self.list_lists()
|
||||
|
@ -1784,7 +1784,7 @@ def main():
|
|||
GeminiClient.do_version(None,None)
|
||||
sys.exit()
|
||||
else:
|
||||
for f in [_CONFIG_DIR, _DATA_DIR]:
|
||||
for f in [xdg("config"), xdg("data")]:
|
||||
if not os.path.exists(f):
|
||||
print("Creating config directory {}".format(f))
|
||||
os.makedirs(f)
|
||||
|
@ -1798,7 +1798,7 @@ def main():
|
|||
# Queue is a list of command (potentially empty)
|
||||
def read_config(queue,rcfile=None,interactive=True):
|
||||
if not rcfile:
|
||||
rcfile = os.path.join(_CONFIG_DIR, "offpunkrc")
|
||||
rcfile = os.path.join(xdg("config"), "offpunkrc")
|
||||
if os.path.exists(rcfile):
|
||||
print("Using config %s" % rcfile)
|
||||
with open(rcfile, "r") as fp:
|
||||
|
|
99
offutils.py
99
offutils.py
|
@ -18,48 +18,67 @@ import netcache
|
|||
|
||||
CACHE_VERSION = 1
|
||||
|
||||
## Config directories
|
||||
## We implement our own python-xdg to avoid conflict with existing libraries.
|
||||
_home = os.path.expanduser('~')
|
||||
data_home = os.environ.get('XDG_DATA_HOME') or \
|
||||
os.path.join(_home,'.local','share')
|
||||
config_home = os.environ.get('XDG_CONFIG_HOME') or \
|
||||
os.path.join(_home,'.config')
|
||||
_CONFIG_DIR = os.path.join(os.path.expanduser(config_home),"offpunk/")
|
||||
_DATA_DIR = os.path.join(os.path.expanduser(data_home),"offpunk/")
|
||||
_old_config = os.path.expanduser("~/.offpunk/")
|
||||
## Look for pre-existing config directory, if any
|
||||
if os.path.exists(_old_config):
|
||||
_CONFIG_DIR = _old_config
|
||||
#if no XDG .local/share and not XDG .config, we use the old config
|
||||
if not os.path.exists(data_home) and os.path.exists(_old_config):
|
||||
_DATA_DIR = _CONFIG_DIR
|
||||
cache_home = os.environ.get('XDG_CACHE_HOME') or\
|
||||
os.path.join(_home,'.cache')
|
||||
_CACHE_PATH = os.path.join(os.path.expanduser(cache_home),"offpunk/")
|
||||
os.makedirs(_CACHE_PATH,exist_ok=True)
|
||||
# We upgrade the cache only once at startup, hence the UPGRADED variable
|
||||
# This is only to avoid unnecessary checks each time the cache is accessed
|
||||
UPGRADED=False
|
||||
def upgrade_cache(cache_folder):
|
||||
#Let’s read current version of the cache
|
||||
version_path = cache_folder + ".version"
|
||||
current_version = 0
|
||||
if os.path.exists(version_path):
|
||||
current_str = None
|
||||
with open(version_path) as f:
|
||||
current_str = f.read()
|
||||
f.close()
|
||||
try:
|
||||
current_version = int(current_str)
|
||||
except:
|
||||
current_version = 0
|
||||
#Now, let’s upgrade the cache if needed
|
||||
while current_version < CACHE_VERSION:
|
||||
current_version += 1
|
||||
upgrade_func = getattr(netcache_migration,"upgrade_to_"+str(current_version))
|
||||
upgrade_func(cache_folder)
|
||||
with open(version_path,"w") as f:
|
||||
f.write(str(current_version))
|
||||
f.close()
|
||||
UPGRADED=True
|
||||
|
||||
#get xdg folder. Folder should be "cache", "data" or "config"
|
||||
def xdg(folder="cache"):
|
||||
## Config directories
|
||||
## We implement our own python-xdg to avoid conflict with existing libraries.
|
||||
_home = os.path.expanduser('~')
|
||||
data_home = os.environ.get('XDG_DATA_HOME') or \
|
||||
os.path.join(_home,'.local','share')
|
||||
config_home = os.environ.get('XDG_CONFIG_HOME') or \
|
||||
os.path.join(_home,'.config')
|
||||
_CONFIG_DIR = os.path.join(os.path.expanduser(config_home),"offpunk/")
|
||||
_DATA_DIR = os.path.join(os.path.expanduser(data_home),"offpunk/")
|
||||
_old_config = os.path.expanduser("~/.offpunk/")
|
||||
## Look for pre-existing config directory, if any
|
||||
if os.path.exists(_old_config):
|
||||
_CONFIG_DIR = _old_config
|
||||
#if no XDG .local/share and not XDG .config, we use the old config
|
||||
if not os.path.exists(data_home) and os.path.exists(_old_config):
|
||||
_DATA_DIR = _CONFIG_DIR
|
||||
cache_home = os.environ.get('XDG_CACHE_HOME') or\
|
||||
os.path.join(_home,'.cache')
|
||||
_CACHE_PATH = os.path.join(os.path.expanduser(cache_home),"offpunk/")
|
||||
os.makedirs(_CACHE_PATH,exist_ok=True)
|
||||
if folder == "cache" and not UPGRADED:
|
||||
upgrade_cache(_CACHE_PATH)
|
||||
if folder == "cache":
|
||||
return _CACHE_PATH
|
||||
elif folder == "config":
|
||||
return _CONFIG_DIR
|
||||
elif folder == "data":
|
||||
return _DATA_DIR
|
||||
else:
|
||||
print("No XDG folder for %s. Check your code."%folder)
|
||||
return None
|
||||
|
||||
#Let’s read current version of the cache
|
||||
version_path = _CACHE_PATH + ".version"
|
||||
current_version = 0
|
||||
if os.path.exists(version_path):
|
||||
current_str = None
|
||||
with open(version_path) as f:
|
||||
current_str = f.read()
|
||||
f.close()
|
||||
try:
|
||||
current_version = int(current_str)
|
||||
except:
|
||||
current_version = 0
|
||||
|
||||
#Now, let’s upgrade the cache if needed
|
||||
while current_version < CACHE_VERSION:
|
||||
current_version += 1
|
||||
upgrade_func = getattr(netcache_migration,"upgrade_to_"+str(current_version))
|
||||
upgrade_func(_CACHE_PATH)
|
||||
with open(version_path,"w") as f:
|
||||
f.write(str(current_version))
|
||||
f.close()
|
||||
|
||||
#An IPV6 URL should be put between []
|
||||
#We try to detect them as locations with more than 2 ":"
|
||||
|
|
Loading…
Reference in New Issue