2019-06-22 12:58:21 +00:00
|
|
|
|
#!/usr/bin/env python3
|
2021-12-30 15:03:08 +00:00
|
|
|
|
# Offpunk Offline Gemini client
|
2023-03-12 06:47:38 +00:00
|
|
|
|
"""
|
|
|
|
|
Offline-First Gemini/Web/Gopher/RSS reader and browser
|
|
|
|
|
"""
|
|
|
|
|
|
2024-02-12 21:25:46 +00:00
|
|
|
|
__version__ = "2.2"
|
2023-03-12 06:47:38 +00:00
|
|
|
|
|
2023-10-07 21:30:09 +00:00
|
|
|
|
## Initial imports and conditional imports {{{
|
2019-06-22 12:58:21 +00:00
|
|
|
|
import argparse
|
|
|
|
|
import cmd
|
2020-05-16 16:58:53 +00:00
|
|
|
|
import datetime
|
2019-06-22 12:58:21 +00:00
|
|
|
|
import io
|
2020-05-10 15:25:03 +00:00
|
|
|
|
import os
|
2019-06-22 12:58:21 +00:00
|
|
|
|
import os.path
|
2021-12-13 12:49:12 +00:00
|
|
|
|
import filecmp
|
2019-06-22 12:58:21 +00:00
|
|
|
|
import random
|
|
|
|
|
import shlex
|
|
|
|
|
import shutil
|
|
|
|
|
import socket
|
|
|
|
|
import sys
|
|
|
|
|
import time
|
2020-05-11 20:22:24 +00:00
|
|
|
|
import urllib.parse
|
2022-03-30 17:23:44 +00:00
|
|
|
|
import subprocess
|
2023-07-04 21:20:39 +00:00
|
|
|
|
import netcache
|
2023-08-10 23:28:58 +00:00
|
|
|
|
import opnk
|
2023-09-04 18:59:32 +00:00
|
|
|
|
import ansicat
|
2023-08-30 22:27:54 +00:00
|
|
|
|
import offthemes
|
2023-12-01 23:11:34 +00:00
|
|
|
|
from offutils import run,term_width,is_local,mode_url,unmode_url, looks_like_url
|
2023-12-04 10:10:20 +00:00
|
|
|
|
from offutils import xdg
|
2023-09-26 20:21:19 +00:00
|
|
|
|
import offblocklist
|
2022-02-18 22:50:08 +00:00
|
|
|
|
try:
|
|
|
|
|
import setproctitle
|
|
|
|
|
setproctitle.setproctitle("offpunk")
|
|
|
|
|
_HAS_SETPROCTITLE = True
|
|
|
|
|
except ModuleNotFoundError:
|
|
|
|
|
_HAS_SETPROCTITLE = False
|
2022-03-12 17:41:38 +00:00
|
|
|
|
_HAS_XSEL = shutil.which('xsel')
|
2023-10-07 21:30:09 +00:00
|
|
|
|
## }}} end of imports
|
2022-01-10 10:19:29 +00:00
|
|
|
|
|
2019-06-22 12:58:21 +00:00
|
|
|
|
# Command abbreviations
# Maps short aliases typed at the prompt to their full command names.
# Expanded (first word only) by GeminiClient.default() before dispatching
# to the matching do_* method; also dumped verbatim by do_abbrevs().
_ABBREVS = {
    "..": "up",
    "a": "add",
    "b": "back",
    "bb": "blackbox",
    "bm": "bookmarks",
    "book": "bookmarks",
    "cp": "copy",
    "f": "forward",
    "g": "go",
    "h": "history",
    "hist": "history",
    "l": "view",
    "less": "view",
    "man": "help",
    "mv": "move",
    "n": "next",
    "off": "offline",
    "on": "online",
    "p": "previous",
    "prev": "previous",
    "q": "quit",
    "r": "reload",
    "s": "save",
    "se": "search",
    "/": "find",
    "t": "tour",
    "u": "up",
    "v": "view",
    "w": "wikipedia",
    # "wen"/"wfr"/"wes" expand to "wikipedia <lang>", pre-selecting a language
    "wen": "wikipedia en",
    "wfr": "wikipedia fr",
    "wes": "wikipedia es",
}
|
|
|
|
|
|
|
|
|
|
# NOTE(review): empty and never referenced in this chunk — handlers are
# managed through self.opencache (see do_handler). Presumably a legacy
# placeholder; confirm before removing.
_MIME_HANDLERS = {
}
|
|
|
|
|
|
2019-08-13 16:56:15 +00:00
|
|
|
|
# GeminiClient Decorators
|
2019-06-22 12:58:21 +00:00
|
|
|
|
def needs_gi(inner):
    """Decorator for GeminiClient methods that require a current page.

    If nothing has been visited yet (``self.current_url`` is falsy), print
    a hint and return None instead of calling the wrapped method.

    Uses functools.wraps so the wrapper keeps the wrapped method's
    __doc__ (needed by cmd's "help" command) as well as __name__ and
    __qualname__ for debugging — the original only copied __doc__.
    """
    import functools

    @functools.wraps(inner)
    def outer(self, *args, **kwargs):
        if not self.current_url:
            print("You need to 'go' somewhere, first")
            return None
        return inner(self, *args, **kwargs)
    return outer
|
|
|
|
|
|
|
|
|
|
class GeminiClient(cmd.Cmd):
|
2022-11-30 19:57:23 +00:00
|
|
|
|
    def __init__(self, completekey="tab", synconly=False):
        """Initialise the interactive client and all of its default state.

        NOTE(review): neither ``completekey`` nor ``synconly`` is used in
        this constructor body — cmd.Cmd.__init__ is called without
        completekey and sync_only is hard-set to False below. Confirm
        whether the parameters are still needed by callers.
        """
        cmd.Cmd.__init__(self)
        # Set umask so that nothing we create can be read by anybody else.
        # The certificate cache and TOFU database contain "browser history"
        # type sensitive information.
        os.umask(0o077)
        # Shared opener/renderer cache (opnk) used for all page display.
        self.opencache = opnk.opencache()
        # Rendering theme; mutated in place by do_theme().
        self.theme = offthemes.default
        # Build the colored "ON> " prompt (we start online).
        self.set_prompt("ON")
        # URL of the page currently displayed (None until the first "go").
        self.current_url = None
        # Position inside the history list when back/forwarding.
        self.hist_index = 0
        # User-defined marks (name -> url) for the "go" command.
        self.marks = {}
        self.page_index = 0
        # url -> replacement url, learned from permanent-redirect responses.
        self.permanent_redirects = {}
        # Sync-only mode is restricted by design
        self.offline_only = False
        self.sync_only = False
        # Whether the netcache backend can fetch http(s) at all.
        self.support_http = netcache._DO_HTTP
        self.automatic_choice = "n"
        self.client_certs = {
            "active": None
        }
        self.active_cert_domains = []
        self.active_is_transient = False
        # User-tunable settings, viewed/changed through do_set().
        self.options = {
            "debug" : False,
            "beta" : False,
            "timeout" : 600,
            "short_timeout" : 5,
            "width" : 72,
            "auto_follow_redirects" : True,
            "tls_mode" : "tofu",
            "archives_size" : 200,
            "history_size" : 200,
            # in megabytes (multiplied by 1_000_000 in _go_to_url)
            "max_size_download" : 10,
            "editor" : None,
            "download_images_first" : True,
            "images_mode" : "readable",
            "redirects" : True,
            # the wikipedia entry needs two %s, one for lang, other for search
            "wikipedia" : "gemini://vault.transjovian.org:1965/search/%s/%s",
            "search" : "gemini://kennedy.gemi.dev/search?%s",
            "accept_bad_ssl_certificates" : False,
            "default_protocol" : "gemini",
        }
        # Seed the redirection table from the shipped blocklist: every
        # blocked domain is represented as a redirect to the sentinel
        # string "blocked" (checked in _go_to_url).
        self.redirects = offblocklist.redirects
        for i in offblocklist.blocked:
            self.redirects[i] = "blocked"
        # Propagate the configured width to the shared terminal helper.
        term_width(new_width=self.options["width"])
        self.log = {
            "start_time": time.time(),
        }
|
|
|
|
|
|
2023-09-14 19:35:23 +00:00
|
|
|
|
def set_prompt(self,prompt):
|
|
|
|
|
key = "prompt_%s"%prompt.lower()
|
|
|
|
|
if key in self.theme:
|
|
|
|
|
colors = self.theme[key]
|
|
|
|
|
else:
|
|
|
|
|
#default color is green
|
|
|
|
|
colors = ["green"]
|
|
|
|
|
open_color = ""
|
|
|
|
|
close_color = ""
|
|
|
|
|
for c in colors:
|
|
|
|
|
if c in offthemes.colors:
|
|
|
|
|
ansi = offthemes.colors[c]
|
|
|
|
|
else:
|
|
|
|
|
ansi = ["32","39"]
|
|
|
|
|
open_color += "%s;"%ansi[0]
|
|
|
|
|
close_color += "%s;"%ansi[1]
|
|
|
|
|
#removing the last ";"
|
|
|
|
|
if open_color.endswith(";"):
|
|
|
|
|
open_color = open_color[:-1]
|
|
|
|
|
if close_color.endswith(";"):
|
|
|
|
|
close_color = close_color[:-1]
|
|
|
|
|
self.prompt = "\001\x1b[%sm\002"%open_color + prompt + "\001\x1b[%sm\002"%close_color + "> "
|
|
|
|
|
#support for 256 color mode:
|
|
|
|
|
#self.prompt = "\001\x1b[38;5;76m\002" + "ON" + "\001\x1b[38;5;255m\002" + "> " + "\001\x1b[0m\002"
|
2023-09-23 00:08:48 +00:00
|
|
|
|
return self.prompt
|
2020-05-16 16:58:53 +00:00
|
|
|
|
|
2022-11-01 23:58:41 +00:00
|
|
|
|
def complete_list(self,text,line,begidx,endidx):
|
2022-11-02 14:47:57 +00:00
|
|
|
|
allowed = []
|
2022-11-01 23:58:41 +00:00
|
|
|
|
cmds = ["create","edit","subscribe","freeze","normal","delete","help"]
|
2022-11-02 14:47:57 +00:00
|
|
|
|
lists = self.list_lists()
|
|
|
|
|
words = len(line.split())
|
|
|
|
|
# We need to autocomplete listname for the first or second argument
|
|
|
|
|
# If the first one is a cmds
|
|
|
|
|
if words <= 1:
|
|
|
|
|
allowed = lists + cmds
|
|
|
|
|
elif words == 2:
|
|
|
|
|
# if text, the completing word is the second
|
|
|
|
|
cond = bool(text)
|
|
|
|
|
if text:
|
|
|
|
|
allowed = lists + cmds
|
|
|
|
|
else:
|
|
|
|
|
current_cmd = line.split()[1]
|
|
|
|
|
if current_cmd in ["help", "create"]:
|
|
|
|
|
allowed = []
|
|
|
|
|
elif current_cmd in cmds:
|
2023-03-12 06:47:38 +00:00
|
|
|
|
allowed = lists
|
2022-11-02 14:47:57 +00:00
|
|
|
|
elif words == 3 and text != "":
|
|
|
|
|
current_cmd = line.split()[1]
|
|
|
|
|
if current_cmd in ["help", "create"]:
|
|
|
|
|
allowed = []
|
|
|
|
|
elif current_cmd in cmds:
|
2023-03-12 06:47:38 +00:00
|
|
|
|
allowed = lists
|
2022-12-07 22:23:31 +00:00
|
|
|
|
return [i+" " for i in allowed if i.startswith(text)]
|
2022-11-01 23:58:41 +00:00
|
|
|
|
|
|
|
|
|
def complete_add(self,text,line,begidx,endidx):
|
2022-11-02 14:47:57 +00:00
|
|
|
|
if len(line.split()) == 2 and text != "":
|
|
|
|
|
allowed = self.list_lists()
|
|
|
|
|
elif len(line.split()) == 1:
|
|
|
|
|
allowed = self.list_lists()
|
|
|
|
|
else:
|
|
|
|
|
allowed = []
|
2022-12-07 22:23:31 +00:00
|
|
|
|
return [i+" " for i in allowed if i.startswith(text)]
|
2022-11-01 23:58:41 +00:00
|
|
|
|
def complete_move(self,text,line,begidx,endidx):
|
2022-11-02 14:47:57 +00:00
|
|
|
|
return self.complete_add(text,line,begidx,endidx)
|
2023-12-24 23:14:04 +00:00
|
|
|
|
def complete_tour(self,text,line,begidx,endidx):
|
|
|
|
|
return self.complete_add(text,line,begidx,endidx)
|
2023-08-30 22:27:54 +00:00
|
|
|
|
|
|
|
|
|
def complete_theme(self,text,line,begidx,endidx):
|
|
|
|
|
elements = offthemes.default
|
|
|
|
|
colors = offthemes.colors
|
|
|
|
|
words = len(line.split())
|
|
|
|
|
if words <= 1:
|
|
|
|
|
allowed = elements
|
|
|
|
|
elif words == 2 and text != "":
|
|
|
|
|
allowed = elements
|
|
|
|
|
else:
|
|
|
|
|
allowed = colors
|
|
|
|
|
return [i+" " for i in allowed if i.startswith(text)]
|
|
|
|
|
|
2022-11-01 23:58:41 +00:00
|
|
|
|
|
2023-07-30 21:35:34 +00:00
|
|
|
|
def get_renderer(self,url=None):
|
|
|
|
|
# If launched without argument, we return the renderer for the current URL
|
2023-07-31 16:37:30 +00:00
|
|
|
|
if not url: url = self.current_url
|
2023-08-30 22:27:54 +00:00
|
|
|
|
return self.opencache.get_renderer(url,theme=self.theme)
|
2023-07-28 16:31:04 +00:00
|
|
|
|
|
2023-08-15 09:36:15 +00:00
|
|
|
|
    def _go_to_url(self, url, update_hist=True, force_refresh=False, handle=True,\
                    grep=None,name=None, mode=None,limit_size=False):
        """This method might be considered "the heart of Offpunk".
        Everything involved in fetching a gemini resource happens here:
        sending the request over the network, parsing the response,
        storing the response in a temporary file, choosing
        and calling a handler program, and updating the history.
        Nothing is returned."""
        if not url:
            return
        # Split a "mode-encoded" URL into plain URL + display mode; an
        # explicit mode argument wins over the one embedded in the URL.
        url,newmode = unmode_url(url)
        if not mode: mode = newmode
        #we don’t handle the name anymore !
        if name:
            print("We don’t handle name of URL: %s"%name)
        # Obey permanent redirects
        if url in self.permanent_redirects:
            # Recurse on the redirect target with all flags preserved.
            self._go_to_url(self.permanent_redirects[url],update_hist=update_hist,\
                            force_refresh=force_refresh, handle=handle, name=name,mode=mode,\
                            limit_size=limit_size,grep=grep)
            return
        # Code to translate URLs to better frontends (think twitter.com -> nitter)
        parsed = urllib.parse.urlparse(url)
        netloc = parsed.netloc
        if netloc.startswith("www."):
            netloc = netloc[4:]
        #we block/redirect even subdomains
        for key in self.redirects.keys():
            # A key starting with "*" matches the domain and any subdomain.
            match = key == netloc
            if key.startswith("*"):
                match = netloc.endswith(key[1:])
            if match:
                if self.redirects[key] == "blocked":
                    # "blocked" is a sentinel value, not a real destination.
                    text = "This website has been blocked.\n"
                    text += "Use the redirect command to unblock it."
                    if handle and not self.sync_only:
                        print(text)
                    return
                else:
                    # Rewrite the host and keep scanning: NOTE(review) the
                    # loop does not stop here, so a later key could match
                    # the *original* netloc again — confirm this is intended.
                    parsed = parsed._replace(netloc = self.redirects[key])
                    url = urllib.parse.urlunparse(parsed)
        # Keyword arguments forwarded to opnk/netcache for the fetch.
        params = {}
        params["timeout"] = self.options["short_timeout"]
        if limit_size:
            # max_size_download is expressed in megabytes.
            params["max_size"] = int(self.options["max_size_download"])*1000000
        params["print_error"] = not self.sync_only
        params["interactive"] = not self.sync_only
        params["offline"] = self.offline_only
        params["accept_bad_ssl_certificates"] = self.options["accept_bad_ssl_certificates"]
        if mode:
            params["images_mode"] = mode
        else:
            params["images_mode"] = self.options["images_mode"]
        if force_refresh:
            # validity=1 second effectively forces a refetch.
            params["validity"] = 1
        elif not self.offline_only:
            #A cache is always valid at least 60seconds
            params["validity"] = 60
        # Use cache or mark as to_fetch if resource is not cached
        if handle and not self.sync_only:
            # Interactive path: open/display the page through opnk.
            displayed, url = self.opencache.opnk(url,mode=mode,grep=grep,theme=self.theme,**params)
            modedurl = mode_url(url,mode)
            if not displayed:
                #if we can’t display, we mark to sync what is not local
                if not is_local(url) and not netcache.is_cache_valid(url):
                    self.get_list("to_fetch")
                    r = self.list_add_line("to_fetch",url=modedurl,verbose=False)
                    if r:
                        print("%s not available, marked for syncing"%url)
                    else:
                        print("%s already marked for syncing"%url)
            else:
                self.page_index = 0
                # Update state (external files are not added to history)
                self.current_url = modedurl
                if update_hist and not self.sync_only:
                    self._update_history(modedurl)
        else:
            #we are asked not to handle or in sync_only mode
            # Only fetch http(s) when the backend supports it.
            if self.support_http or not parsed.scheme in ["http","https"] :
                netcache.fetch(url,**params)
|
2019-06-22 12:58:21 +00:00
|
|
|
|
|
2022-11-20 11:19:03 +00:00
|
|
|
|
@needs_gi
|
2023-07-31 15:56:08 +00:00
|
|
|
|
def _show_lookup(self, offset=0, end=None, show_url=False):
|
|
|
|
|
for n, u in enumerate(self.get_renderer().get_links()[offset:end]):
|
|
|
|
|
index = n+offset+1
|
|
|
|
|
line = "[%s] %s" %(index,u)
|
|
|
|
|
#TODO: implement proper listing of url (with protocol and show_url)
|
|
|
|
|
# protocol = "" if gi.scheme == "gemini" else " %s" % gi.scheme
|
|
|
|
|
# line = "[%d%s] %s" % (index, protocol, gi.name or gi.url)
|
|
|
|
|
# line += " (%s)" % gi.url
|
|
|
|
|
print(line)
|
2019-06-22 12:58:21 +00:00
|
|
|
|
|
2023-08-03 21:17:12 +00:00
|
|
|
|
    def _update_history(self, url):
        """Push `url` on top of the "history" list, respecting history_size.

        Going back/forward through history does not re-add entries.
        """
        # We never update while in sync_only
        # We don’t add history to itself.
        if self.sync_only or not url or url == "list:///history":
            return
        #First, we call get_list to create history if needed
        self.get_list("history")
        links = self.list_get_links("history")
        length = len(links)
        #Don’t update history if we are back/forwarding through it
        if length > 0 and links[self.hist_index] == url:
            return
        # NOTE(review): `length` is capped here but never read afterwards —
        # the actual truncation is done by list_add_top(limit=...) below.
        # Looks like dead code; confirm before removing.
        if length > self.options["history_size"]:
            length = self.options["history_size"]
        self.list_add_top("history",limit=self.options["history_size"],truncate_lines=self.hist_index)
        self.hist_index = 0
|
|
|
|
|
|
2019-06-22 12:58:21 +00:00
|
|
|
|
# Cmd implementation follows
|
|
|
|
|
    def default(self, line):
        """Fallback for input that is not a known command.

        Tries, in order: EOF -> quit; "/..." -> find; an abbreviation from
        _ABBREVS; something that looks like a URL -> go; a number -> follow
        that link index on the current page.
        """
        if line.strip() == "EOF":
            return self.onecmd("quit")
        elif line.startswith("/"):
            # "/pattern" is a shortcut for the find command.
            return self.do_find(line[1:])
        # Expand abbreviated commands
        first_word = line.split()[0].strip()
        if first_word in _ABBREVS:
            full_cmd = _ABBREVS[first_word]
            # Only the first occurrence is replaced, i.e. the command word.
            expanded = line.replace(first_word, full_cmd, 1)
            return self.onecmd(expanded)
        # Try to access it like an URL
        if looks_like_url(line):
            return self.do_go(line)
        # Try to parse numerical index for lookup table
        try:
            n = int(line.strip())
        except ValueError:
            print("What?")
            return
        # if we have no url, there's nothing to do
        if self.current_url is None:
            print("No links to index")
            return
        else:
            r = self.get_renderer()
            if r:
                # NOTE(review): get_link's index convention (0- or 1-based)
                # is defined in ansicat, not visible here — n is passed
                # through unchanged.
                url = r.get_link(n)
                self._go_to_url(url)
            else:
                print("No page with links")
                return
|
2019-06-22 12:58:21 +00:00
|
|
|
|
|
|
|
|
|
### Settings
|
2022-07-21 15:02:36 +00:00
|
|
|
|
def do_redirect(self,line):
|
|
|
|
|
"""Display and manage the list of redirected URLs. This features is mostly useful to use privacy-friendly frontends for popular websites."""
|
|
|
|
|
if len(line.split()) == 1:
|
|
|
|
|
if line in self.redirects:
|
|
|
|
|
print("%s is redirected to %s" %(line,self.redirects[line]))
|
|
|
|
|
else:
|
|
|
|
|
print("Please add a destination to redirect %s" %line)
|
|
|
|
|
elif len(line.split()) >= 2:
|
|
|
|
|
orig, dest = line.split(" ",1)
|
|
|
|
|
if dest.lower() == "none":
|
|
|
|
|
if orig in self.redirects:
|
|
|
|
|
self.redirects.pop(orig)
|
|
|
|
|
print("Redirection for %s has been removed"%orig)
|
|
|
|
|
else:
|
|
|
|
|
print("%s was not redirected. Nothing has changed."%orig)
|
|
|
|
|
elif dest.lower() == "block":
|
|
|
|
|
self.redirects[orig] = "blocked"
|
|
|
|
|
print("%s will now be blocked"%orig)
|
|
|
|
|
else:
|
|
|
|
|
self.redirects[orig] = dest
|
|
|
|
|
print("%s will now be redirected to %s" %(orig,dest))
|
|
|
|
|
else:
|
|
|
|
|
toprint="Current redirections:\n"
|
|
|
|
|
toprint+="--------------------\n"
|
|
|
|
|
for r in self.redirects:
|
|
|
|
|
toprint += ("%s\t->\t%s\n" %(r,self.redirects[r]))
|
|
|
|
|
toprint +="\nTo add new, use \"redirect origine.com destination.org\""
|
|
|
|
|
toprint +="\nTo remove a redirect, use \"redirect origine.com NONE\""
|
|
|
|
|
toprint +="\nTo completely block a website, use \"redirect origine.com BLOCK\""
|
2023-08-31 19:35:08 +00:00
|
|
|
|
toprint +="\nTo block also subdomains, prefix with *: \"redirect *origine.com BLOCK\""
|
2022-07-21 15:02:36 +00:00
|
|
|
|
print(toprint)
|
2022-11-30 19:57:23 +00:00
|
|
|
|
|
2019-06-22 12:58:21 +00:00
|
|
|
|
    def do_set(self, line):
        """View or set various options."""
        if not line.strip():
            # Show all current settings
            for option in sorted(self.options.keys()):
                print("%s %s" % (option, self.options[option]))
        elif len(line.split()) == 1 :
            # Show current value of one specific setting
            option = line.strip()
            if option in self.options:
                print("%s %s" % (option, self.options[option]))
            else:
                print("Unrecognised option %s" % option)
        else:
            # Set value of one specific setting
            option, value = line.split(" ", 1)
            if option not in self.options:
                print("Unrecognised option %s" % option)
                return
            # Validate / convert values
            elif option == "tls_mode":
                # NOTE(review): on success the raw string is stored below
                # without .lower() — "TOFU" would be kept as typed. Confirm
                # downstream comparisons are case-insensitive.
                if value.lower() not in ("ca", "tofu"):
                    print("TLS mode must be `ca` or `tofu`!")
                    return
            elif option == "accept_bad_ssl_certificates":
                # NOTE(review): the value is stored as the *string* the user
                # typed ("true"/"false"), not converted to bool — the string
                # "false" is truthy. Verify how this option is consumed.
                if value.lower() == "false":
                    print("Only high security certificates are now accepted")
                elif value.lower() == "true":
                    print("Low security SSL certificates are now accepted")
                else:
                    print("accept_bad_ssl_certificates should be True or False")
                    return
            elif option == "width":
                if value.isnumeric():
                    value = int(value)
                    print("changing width to ",value)
                    term_width(new_width=value)
                    # Cached renderings are width-dependent: drop them.
                    self.opencache.cleanup()
                else:
                    # NOTE(review): no return here, so an invalid width
                    # string still falls through and is stored in
                    # self.options["width"] below. Looks unintended.
                    print("%s is not a valid width (integer required)"%value)
            # Generic conversions for all remaining options:
            # int, then bool, then float, else keep the raw string.
            elif value.isnumeric():
                value = int(value)
            elif value.lower() == "false":
                value = False
            elif value.lower() == "true":
                value = True
            else:
                try:
                    value = float(value)
                except ValueError:
                    pass
            self.options[option] = value
|
2023-08-30 22:27:54 +00:00
|
|
|
|
    def do_theme(self,line):
        """Change the colors of your rendered text.

        "theme ELEMENT COLOR"

        ELEMENT is one of: window_title, window_subtitle, title,
        subtitle,subsubtitle,link,oneline_link,new_link,image_link,preformatted,blockquote.

        COLOR is one or many (separated by space) of: bold, faint, italic, underline, black,
        red, green, yellow, blue, purple, cyan, white.

        Each color can alternatively be prefaced with "bright_"."""
        words = line.split()
        le = len(words)
        if le == 0:
            # No argument: print the effective theme of the list renderer.
            t = self.get_renderer("list:///").get_theme()
            for e in t:
                print("%s set to %s"%(e,t[e]))
        else:
            element = words[0]
            if element not in offthemes.default.keys():
                print("%s is not a valid theme element"%element)
                print("Valid theme elements are: ")
                valid = []
                for k in offthemes.default:
                    valid.append(k)
                print(valid)
            else:
                if le == 1:
                    # Element only: show its current (or default) colors.
                    if element in self.theme.keys():
                        value = self.theme[element]
                    else:
                        value = offthemes.default[element]
                    print("%s is set to %s"%(element,str(value)))
                else:
                    #Now we parse the colors
                    # Reject the whole assignment if any color is unknown.
                    for w in words[1:]:
                        if w not in offthemes.colors.keys():
                            print("%s is not a valid color"%w)
                            print("Valid colors are one of: ")
                            valid = []
                            for k in offthemes.colors:
                                valid.append(k)
                            print(valid)
                            return
                    self.theme[element] = words[1:]
                    # Cached renderings use the old colors: drop them.
                    self.opencache.cleanup()
                    #now we update the prompt
                    if self.offline_only:
                        self.set_prompt("OFF")
                    else:
                        self.set_prompt("ON")
|
2019-06-22 12:58:21 +00:00
|
|
|
|
|
|
|
|
|
def do_handler(self, line):
|
|
|
|
|
"""View or set handler commands for different MIME types."""
|
|
|
|
|
if not line.strip():
|
|
|
|
|
# Show all current handlers
|
2023-08-10 23:28:58 +00:00
|
|
|
|
h = self.opencache.get_handlers()
|
|
|
|
|
for mime in sorted(h.keys()):
|
|
|
|
|
print("%s %s" % (mime, h[mime]))
|
2019-06-22 12:58:21 +00:00
|
|
|
|
elif len(line.split()) == 1:
|
|
|
|
|
mime = line.strip()
|
2023-08-10 23:28:58 +00:00
|
|
|
|
h = self.opencache.get_handlers(mime=mime)
|
|
|
|
|
if h:
|
|
|
|
|
print("%s %s" % (mime, h))
|
2019-06-22 12:58:21 +00:00
|
|
|
|
else:
|
|
|
|
|
print("No handler set for MIME type %s" % mime)
|
|
|
|
|
else:
|
|
|
|
|
mime, handler = line.split(" ", 1)
|
2023-08-10 23:28:58 +00:00
|
|
|
|
self.opencache.set_handler(mime,handler)
|
2019-06-22 12:58:21 +00:00
|
|
|
|
|
2020-05-27 13:16:22 +00:00
|
|
|
|
def do_abbrevs(self, *args):
|
2021-12-30 15:03:08 +00:00
|
|
|
|
"""Print all Offpunk command abbreviations."""
|
2020-06-13 10:39:18 +00:00
|
|
|
|
header = "Command Abbreviations:"
|
|
|
|
|
self.stdout.write("\n{}\n".format(str(header)))
|
2020-05-27 13:16:22 +00:00
|
|
|
|
if self.ruler:
|
|
|
|
|
self.stdout.write("{}\n".format(str(self.ruler * len(header))))
|
|
|
|
|
for k, v in _ABBREVS.items():
|
|
|
|
|
self.stdout.write("{:<7} {}\n".format(k, v))
|
2020-06-13 10:39:18 +00:00
|
|
|
|
self.stdout.write("\n")
|
2020-05-27 13:16:22 +00:00
|
|
|
|
|
2021-12-06 15:43:16 +00:00
|
|
|
|
def do_offline(self, *args):
|
2021-12-30 15:03:08 +00:00
|
|
|
|
"""Use Offpunk offline by only accessing cached content"""
|
2021-12-06 15:43:16 +00:00
|
|
|
|
if self.offline_only:
|
2021-12-16 09:43:25 +00:00
|
|
|
|
print("Offline and undisturbed.")
|
2021-12-06 15:43:16 +00:00
|
|
|
|
else:
|
|
|
|
|
self.offline_only = True
|
2023-09-14 19:35:23 +00:00
|
|
|
|
self.set_prompt("OFF")
|
2021-12-30 15:03:08 +00:00
|
|
|
|
print("Offpunk is now offline and will only access cached content")
|
2023-03-12 06:47:38 +00:00
|
|
|
|
|
2021-12-16 09:43:25 +00:00
|
|
|
|
def do_online(self, *args):
|
2021-12-30 15:03:08 +00:00
|
|
|
|
"""Use Offpunk online with a direct connection"""
|
2023-03-12 06:47:38 +00:00
|
|
|
|
if self.offline_only:
|
2021-12-16 09:43:25 +00:00
|
|
|
|
self.offline_only = False
|
2023-09-14 19:35:23 +00:00
|
|
|
|
self.set_prompt("ON")
|
2021-12-30 15:03:08 +00:00
|
|
|
|
print("Offpunk is online and will access the network")
|
2021-12-16 09:43:25 +00:00
|
|
|
|
else:
|
|
|
|
|
print("Already online. Try offline.")
|
2021-12-06 15:43:16 +00:00
|
|
|
|
|
2022-03-15 22:43:21 +00:00
|
|
|
|
    def do_copy(self, arg):
        """Copy the content of the last visited page as gemtext/html in the clipboard.

        Use with "url" as argument to only copy the adress.
        Use with "raw" to copy ANSI content as seen in your terminal (with colour codes).
        Use with "cache" to copy the path of the cached content.
        Use with "title" to copy the title of the page.
        Use with "link" to copy a link in the gemtext format to that page with the title.
        """
        # Requires both a visited page and the external xsel utility;
        # the payload is always passed to xsel on stdin (never via the
        # command line) to avoid shell-escaping issues.
        if self.current_url:
            if _HAS_XSEL:
                args = arg.split()
                if args and args[0] == "url":
                    # "copy url [N]": either the Nth link (1-based) or the
                    # current page's URL, stripped of its display mode.
                    if len(args) > 1 and args[1].isdecimal():
                        url = self.get_renderer().get_link(int(args[1])-1)
                    else:
                        url,mode = unmode_url(self.current_url)
                    print(url)
                    run("xsel -b -i", input=url, direct_output=True)
                elif args and args[0] == "raw":
                    # Rendered (ANSI) version, as displayed in the terminal.
                    tmp = self.opencache.get_temp_filename(self.current_url)
                    if tmp:
                        run("xsel -b -i", input=open(tmp, "rb"),\
                            direct_output=True)
                elif args and args[0] == "cache":
                    # The filesystem path of the cached copy.
                    run("xsel -b -i", input=netcache.get_cache_path(self.current_url),\
                        direct_output=True)
                elif args and args[0] == "title":
                    title = self.get_renderer().get_page_title()
                    run("xsel -b -i",input=title, direct_output=True)
                    print(title)
                elif args and args[0] == "link":
                    # A ready-to-paste gemtext link line: "=> URL title".
                    link = "=> %s %s"%(unmode_url(self.current_url)[0],\
                        self.get_renderer().get_page_title())
                    print(link)
                    run("xsel -b -i", input=link,direct_output=True)
                else:
                    # Default: the raw cached source of the page.
                    run("xsel -b -i", input=open(netcache.get_cache_path(self.current_url), "rb"),\
                        direct_output=True)
            else:
                print("Please install xsel to use copy")
        else:
            print("No content to copy, visit a page first")
|
|
|
|
|
|
2019-06-22 12:58:21 +00:00
|
|
|
|
### Stuff for getting around
|
|
|
|
|
def do_go(self, line):
    """Go to a gemini URL or marked item."""
    line = line.strip()
    if not line:
        # No argument: offer URLs found in the X clipboards (needs xsel).
        if shutil.which('xsel'):
            candidates = []
            for flag in ["-p", "-s", "-b"]:
                try:
                    candidates.append(run("xsel " + flag))
                except Exception:
                    # Unreadable clipboard selection: silently skip it.
                    pass
            urls = []
            for candidate in candidates:
                fresh = candidate not in urls
                if "://" in candidate and looks_like_url(candidate) and fresh:
                    urls.append(candidate)
            if len(urls) > 1:
                prompt = "URLs in your clipboard\n"
                for num, u in enumerate(urls, start=1):
                    prompt += "[%s] %s\n" % (num, u)
                prompt += "Where do you want to go today ?> "
                answer = input(prompt)
                if answer.isdigit() and 0 < int(answer) <= len(urls):
                    self.do_go(urls[int(answer) - 1])
            elif len(urls) == 1:
                self.do_go(urls[0])
            else:
                print("Go where? (hint: simply copy an URL in your clipboard)")
        else:
            print("Go where? (hint: install xsel to go to copied URLs)")
    # A single-letter mark set with the "mark" command?
    elif line in self.marks:
        self._go_to_url(self.marks[line])
    # A local file or directory?
    elif os.path.exists(os.path.expanduser(line)):
        self._go_to_url(line)
    # Otherwise treat it as a URL.
    elif looks_like_url(line):
        self._go_to_url(line)
    elif "://" not in line and "default_protocol" in self.options.keys()\
            and looks_like_url(self.options["default_protocol"] + "://" + line):
        # Bare host/path: retry with the configured default protocol.
        self._go_to_url(self.options["default_protocol"] + "://" + line)
    else:
        print("%s is not a valid URL to go" % line)
|
|
|
@needs_gi
def do_reload(self, *args):
    """Reload the current URL.

    When offline, the URL is queued in the "to_fetch" list so the next
    --sync downloads it instead of reloading immediately."""
    if self.offline_only and not is_local(self.current_url):
        # Ensure the to_fetch list exists, then queue the URL.
        self.get_list("to_fetch")
        added = self.list_add_line("to_fetch", url=self.current_url, verbose=False)
        if added:
            print("%s marked for syncing" %self.current_url)
        else:
            print("%s already marked for syncing" %self.current_url)
    else:
        # Drop rendered temporary files before re-displaying.
        self.opencache.cleanup()
        # NOTE(review): force_refresh=False is surprising for a "reload";
        # confirm the network cache is really bypassed here.
        self._go_to_url(self.current_url, force_refresh=False)
|
|
|
|
|
@needs_gi
def do_up(self, *args):
    """Go up one directory in the path.
    Take an integer as argument to go up multiple times."""
    level = 1
    if args[0].isnumeric():
        level = int(args[0])
    elif args[0] != "":
        print("Up only take integer as arguments")
    url, mode = unmode_url(self.current_url)
    parsed = urllib.parse.urlparse(url)
    path = parsed.path.rstrip('/')
    for _ in range(level):
        components = list(os.path.split(path))
        # Never climb above the root and never climb inside local files.
        if is_local(url) or len(components) == 1:
            break
        if len(components) > 1:
            components.pop()
            path = os.path.join(*components)
    if parsed.scheme == "gopher":
        # Gopher URLs carry the item type ("1" = menu) as a path prefix.
        path = "/1" + path
    destination = urllib.parse.urlunparse((parsed.scheme, parsed.netloc, path, "", "", ""))
    self._go_to_url(destination)
|
|
|
|
|
def do_back(self, *args):
    """Go back to the previous gemini item."""
    # Make sure the history list exists before reading it.
    self.get_list("history")
    links = self.list_get_links("history")
    # Already at the oldest entry: nothing to do.
    if self.hist_index >= len(links) - 1:
        return
    self.hist_index += 1
    self._go_to_url(links[self.hist_index], update_hist=False)
|
|
|
|
|
def do_forward(self, *args):
    """Go forward to the next gemini item."""
    # Make sure the history list exists before reading it.
    self.get_list("history")
    links = self.list_get_links("history")
    # Already at the newest entry: nothing to do.
    if self.hist_index <= 0:
        return
    self.hist_index -= 1
    self._go_to_url(links[self.hist_index], update_hist=False)
|
|
|
|
|
@needs_gi
def do_root(self, *args):
    """Go to root selector of the server hosting current item."""
    parts = urllib.parse.urlparse(self.current_url)
    root = urllib.parse.urlunparse((parts.scheme, parts.netloc, "/", "", "", ""))
    self._go_to_url(root)
|
|
|
|
|
def do_tour(self, line):
    """Add index items as waypoints on a tour, which is basically a FIFO
    queue of gemini items.

    `tour` or `t` alone brings you to the next item in your tour.
    Items can be added with `tour 1 2 3 4` or ranges like `tour 1-4`.
    All items in current menu can be added with `tour *`.
    All items in $LIST can be added with `tour $LIST`.
    Current item can be added back to the end of the tour with `tour .`.
    Current tour can be listed with `tour ls` and scrubbed with `tour clear`."""
    # Creating the tour list if needed
    self.get_list("tour")
    line = line.strip()
    if not line:
        # Fly to next waypoint on tour
        if len(self.list_get_links("tour")) < 1:
            print("End of tour.")
        else:
            url = self.list_go_to_line("1", "tour")
            if url:
                self.list_rm_url(url, "tour")
    elif line == "ls":
        self.list_show("tour")
    elif line == "clear":
        for l in self.list_get_links("tour"):
            self.list_rm_url(l, "tour")
    elif line == "*":
        # Every link of the current page.
        for l in self.get_renderer().get_links():
            self.list_add_line("tour", url=l, verbose=False)
    elif line == ".":
        # The current page itself.
        self.list_add_line("tour", verbose=False)
    elif looks_like_url(line):
        self.list_add_line("tour", url=line)
    elif line in self.list_lists():
        list_path = self.list_path(line)
        if not list_path:
            # BUG FIX: previously interpolated the *builtin* `list`
            # instead of the requested list name.
            print("List %s does not exist. Cannot add it to tour"%(line))
        else:
            url = "list:///%s"%line
            for l in self.get_renderer(url).get_links():
                self.list_add_line("tour", url=l, verbose=False)
    elif self.current_url:
        # Numeric indices and ranges relative to the current page.
        for index in line.split():
            try:
                pair = index.split('-')
                if len(pair) == 1:
                    # Just a single index
                    n = int(index)
                    url = self.get_renderer().get_link(n)
                    self.list_add_line("tour", url=url, verbose=False)
                elif len(pair) == 2:
                    # Two endpoints for a range of indices (either order).
                    if int(pair[0]) < int(pair[1]):
                        indices = range(int(pair[0]), int(pair[1]) + 1)
                    else:
                        indices = range(int(pair[0]), int(pair[1]) - 1, -1)
                    for n in indices:
                        url = self.get_renderer().get_link(n)
                        self.list_add_line("tour", url=url, verbose=False)
                else:
                    # Syntax error
                    print("Invalid use of range syntax %s, skipping" % index)
            except ValueError:
                print("Non-numeric index %s, skipping." % index)
            except IndexError:
                # n is always bound here: IndexError can only come from
                # get_link(n), after n was assigned.
                print("Invalid index %d, skipping." % n)
|
|
|
|
|
@needs_gi
def do_mark(self, line):
    """Mark the current item with a single letter. This letter can then
    be passed to the 'go' command to return to the current item later.
    Think of it like marks in vi: 'mark a'='ma' and 'go a'=''a'.
    Marks are temporary until shutdown (not saved to disk)."""
    line = line.strip()
    if not line:
        # BUG FIX: marks store plain URL strings (see assignment below),
        # not objects with .name/.url — the old code raised
        # AttributeError when listing marks.
        for mark, url in self.marks.items():
            print("[%s] %s" % (mark, url))
    elif line.isalpha() and len(line) == 1:
        self.marks[line] = self.current_url
    else:
        print("Invalid mark, must be one letter")
2022-02-18 14:52:27 +00:00
|
|
|
|
@needs_gi
def do_info(self, line):
    """Display information about current page."""
    renderer = self.get_renderer()
    url, mode = unmode_url(self.current_url)
    out = renderer.get_page_title() + "\n\n"
    out += "URL : " + url + "\n"
    out += "Mime : " + renderer.get_mime() + "\n"
    out += "Cache : " + netcache.get_cache_path(url) + "\n"
    if renderer:
        # BUG FIX: the previous str.lstrip("<class '__main__.") stripped
        # a *character set*, not a prefix, mangling names whose module
        # starts with one of those characters. Use the class name directly.
        rend = type(renderer).__name__
    else:
        rend = "None"
    out += "Renderer : " + rend + "\n\n"
    lists = []
    for l in self.list_lists():
        if self.list_has_url(url, l):
            lists.append(l)
    if len(lists) > 0:
        # Typo fix: "appeard" -> "appeared".
        out += "Page appeared in following lists :\n"
        # User lists first, with their subscription status…
        for l in lists:
            if not self.list_is_system(l):
                status = "normal list"
                if self.list_is_subscribed(l):
                    status = "subscription"
                elif self.list_is_frozen(l):
                    status = "frozen list"
                out += " • %s\t(%s)\n" %(l, status)
        # …then system lists.
        for l in lists:
            if self.list_is_system(l):
                out += " • %s\n" %l
    else:
        # Typo fix: "not save" -> "not saved".
        out += "Page is not saved in any list"
    print(out)
|
2020-05-10 12:34:48 +00:00
|
|
|
|
def do_version(self, line):
    """Display version and system information."""
    def has(value):
        # Green "Installed" / red "Not Installed" ANSI tag.
        if value:
            return "\t\x1b[1;32mInstalled\x1b[0m\n"
        else:
            return "\t\x1b[1;31mNot Installed\x1b[0m\n"
    output = "Offpunk " + __version__ + "\n"
    output += "===========\n"
    output += "Highly recommended:\n"
    output += " - python-cryptography : " + has(netcache._HAS_CRYPTOGRAPHY)
    output += " - xdg-open : " + has(opnk._HAS_XDGOPEN)
    output += "\nWeb browsing:\n"
    output += " - python-requests : " + has(netcache._DO_HTTP)
    output += " - python-feedparser : " + has(ansicat._DO_FEED)
    output += " - python-bs4 : " + has(ansicat._HAS_SOUP)
    output += " - python-readability : " + has(ansicat._HAS_READABILITY)
    output += " - timg 1.3.2+ : " + has(ansicat._NEW_TIMG)
    if ansicat._NEW_CHAFA:
        output += " - chafa 1.10+ : " + has(ansicat._HAS_CHAFA)
    else:
        output += " - chafa : " + has(ansicat._HAS_CHAFA)
        output += " - python-pil : " + has(ansicat._HAS_PIL)
    output += "\nNice to have:\n"
    output += " - python-setproctitle : " + has(_HAS_SETPROCTITLE)
    output += " - xsel : " + has(_HAS_XSEL)

    output += "\nFeatures :\n"
    if ansicat._NEW_CHAFA:
        output += " - Render images (chafa or timg) : " + has(ansicat._RENDER_IMAGE)
    else:
        output += " - Render images (python-pil, chafa or timg) : " + has(ansicat._RENDER_IMAGE)
    output += " - Render HTML (bs4, readability) : " + has(ansicat._DO_HTML)
    output += " - Render Atom/RSS feeds (feedparser) : " + has(ansicat._DO_FEED)
    output += " - Connect to http/https (requests) : " + has(netcache._DO_HTTP)
    output += " - Detect text encoding (python-chardet) : " + has(netcache._HAS_CHARDET)
    output += " - copy to/from clipboard (xsel) : " + has(_HAS_XSEL)
    output += " - restore last position (less 572+) : " + has(opnk._LESS_RESTORE_POSITION)
    output += "\n"
    output += "Config directory : " + xdg("config") + "\n"
    output += "User Data directory : " + xdg("data") + "\n"
    # Typo fix: "directoy" -> "directory".
    output += "Cache directory : " + xdg("cache")

    print(output)
### Stuff that modifies the lookup table
|
|
|
|
|
def do_ls(self, line):
    """List contents of current index.
    Use 'ls -l' to see URLs."""
    show_urls = "-l" in line
    self._show_lookup(show_url=show_urls)
    # Restart "empty line" paging from the top.
    self.page_index = 0
|
2022-11-14 20:33:40 +00:00
|
|
|
|
def do_search(self, line):
    """Search on Gemini using the engine configured (by default kennedy.gemi.dev)
    You can configure it using "set search URL".
    URL should contains one "%s" that will be replaced by the search term."""
    query = urllib.parse.quote(line)
    self._go_to_url(self.options["search"] % query)
|
|
|
|
|
def do_wikipedia(self, line):
    """Search on wikipedia using the configured Gemini interface.
    The first word should be the two letters code for the language.
    Exemple : "wikipedia en Gemini protocol"
    But you can also use abbreviations to go faster:
    "wen Gemini protocol". (your abbreviation might be missing, report the bug)
    The interface used can be modified with the command:
    "set wikipedia URL" where URL should contains two "%s", the first
    one used for the language, the second for the search string."""
    words = line.split(" ", maxsplit=1)
    # BUG FIX: require an actual search term after the language code —
    # a bare two-letter query (e.g. "wikipedia fr") used to crash with
    # IndexError on words[1].
    if len(words) == 2 and len(words[0]) == 2:
        lang = words[0]
        search = urllib.parse.quote(words[1])
    else:
        # No language prefix: default to English, search the whole line.
        lang = "en"
        search = urllib.parse.quote(line)
    url = self.options["wikipedia"] % (lang, search)
    self._go_to_url(url)
|
2020-05-15 11:38:51 +00:00
|
|
|
|
def do_gus(self, line):
    """Submit a search query to the geminispace.info search engine."""
    if not line:
        print("What?")
        return
    # Use proper percent-encoding (consistent with do_search): the old
    # replace(" ", "%20") left characters like "&", "?" and "%" raw.
    search = urllib.parse.quote(line)
    self._go_to_url("gemini://geminispace.info/search?%s" % search)
|
2019-06-22 12:58:21 +00:00
|
|
|
|
def do_history(self, *args):
    """Display history."""
    # History is stored as a regular list named "history".
    self.list_show("history")
|
2022-03-25 12:37:23 +00:00
|
|
|
|
@needs_gi
def do_find(self, searchterm):
    """Find in current page by displaying only relevant lines (grep)."""
    # Re-render the current page filtered through the search term,
    # without touching the history.
    self._go_to_url(self.current_url, update_hist=False, grep=searchterm)
|
|
|
|
|
def emptyline(self):
    """Page through index ten lines at a time."""
    start = self.page_index
    # Nothing to page through, or already past the last link.
    if not self.current_url or start > len(self.get_renderer().get_links()):
        return
    self._show_lookup(offset=start, end=start + 10)
    self.page_index += 10
### Stuff that does something to most recently viewed item
|
|
|
|
|
@needs_gi
def do_cat(self, *args):
    """Run most recently visited item through "cat" command."""
    tmp = self.opencache.get_temp_filename(self.current_url)
    # BUG FIX: guard against a missing temp file (consistent with
    # do_shell) instead of crashing in open(None).
    if tmp:
        run("cat", input=open(tmp, "rb"), direct_output=True)
|
|
|
|
|
@needs_gi
def do_view(self, *args):
    """Run most recently visited item through "less" command, restoring \
    previous position.
    Use "view normal" to see the default article view on html page.
    Use "view full" to see a complete html page instead of the article view.
    Use "view feed" to see the linked feed of the page (in any).
    Use "view feeds" to see available feeds on this page.
    Use "view XX" where XX is a number to view information about link XX.
    (full, feed, feeds have no effect on non-html content)."""
    if not (self.current_url and args and args[0] != ""):
        # No argument: just re-display the current page.
        self._go_to_url(self.current_url)
        return
    u, m = unmode_url(self.current_url)
    arg = args[0]
    if arg in ["full", "debug", "source"]:
        self._go_to_url(self.current_url, mode=arg)
    elif arg in ["normal", "readable"]:
        self._go_to_url(self.current_url, mode="readable")
    elif arg == "feed":
        feeds = self.get_renderer().get_subscribe_links()
        if len(feeds) > 1:
            # First entry is the page itself; second is the actual feed.
            self.do_go(feeds[1][0])
        elif "rss" in feeds[0][1] or "atom" in feeds[0][1]:
            print("%s is already a feed" %u)
        else:
            print("No other feed found on %s"%u)
    elif arg == "feeds":
        feeds = self.get_renderer().get_subscribe_links()
        prompt = "Available views :\n"
        for num, feed in enumerate(feeds, start=1):
            prompt += "[%s] %s [%s]\n"%(num, feed[0], feed[1])
        prompt += "Which view do you want to see ? >"
        answer = input(prompt)
        if answer.isdigit() and 0 < int(answer) <= len(feeds):
            self.do_go(feeds[int(answer) - 1][0])
    elif arg.isdigit():
        # Show details about link number <arg>.
        link_url = self.get_renderer().get_link(int(arg))
        if link_url:
            print("Link %s is: %s"%(arg, link_url))
            if netcache.is_cache_valid(link_url):
                last_modified = netcache.cache_last_modified(link_url)
                link_renderer = self.get_renderer(link_url)
                if link_renderer:
                    print(link_renderer.get_page_title())
                else:
                    print("Empty cached version")
                print("Last cached on %s"%time.ctime(last_modified))
            else:
                print("No cached version for this link")
    else:
        print("Valid argument for view are : normal, full, feed, feeds or a number")
|
2022-02-12 13:24:43 +00:00
|
|
|
|
@needs_gi
def do_open(self, *args):
    """Open current item with the configured handler or xdg-open.
    Uses "open url" to open current URL in a browser.
    see "handler" command to set your handler."""
    u, m = unmode_url(self.current_url)
    if args[0] == "url":
        # Hand the raw URL to the desktop's default browser.
        run("xdg-open %s", parameter=u, direct_output=True)
    else:
        # Open the cached content itself, outside the terminal.
        self.opencache.opnk(u, terminal=False)
|
2019-06-22 12:58:21 +00:00
|
|
|
|
@needs_gi
def do_shell(self, line):
    """'cat' most recently visited item through a shell pipeline.
    '!' is an useful shortcut."""
    tmp = self.opencache.get_temp_filename(self.current_url)
    # Only pipe if a rendered temp file actually exists.
    if tmp:
        run(line, input=open(tmp, "rb"), direct_output=True)
|
|
|
|
|
@needs_gi
def do_save(self, line):
    """Save an item to the filesystem.
    'save n filename' saves menu item n to the specified filename.
    'save filename' saves the last viewed item to the specified filename.
    'save n' saves menu item n to an automagic filename."""
    args = line.strip().split()
    # Step 1: work out which item (index) and which destination (filename).
    if len(args) == 0:
        # No arguments: save the current item under an inferred name.
        if not netcache.is_cache_valid(self.current_url):
            print("You cannot save if not cached!")
            return
        index, filename = None, None
    elif len(args) == 1:
        # Numeric → an index with inferred filename; otherwise a filename
        # for the current item.
        try:
            index, filename = int(args[0]), None
        except ValueError:
            index, filename = None, os.path.expanduser(args[0])
    elif len(args) == 2:
        # Index followed by filename.
        index, filename = args
        try:
            index = int(index)
        except ValueError:
            print("First argument is not a valid item index!")
            return
        filename = os.path.expanduser(filename)
    else:
        print("You must provide an index, a filename, or both.")
        return
    # Step 2: if an index was given, navigate to that link (quietly).
    if index:
        last_url = self.current_url
        try:
            url = self.get_renderer().get_link(index)
            self._go_to_url(url, update_hist = False, handle = False)
        except IndexError:
            print ("Index too high!")
            self.current_url = last_url
            return
    else:
        url = self.current_url

    # Step 3: infer the filename from the cache path if none was given.
    if not filename:
        filename = os.path.basename(netcache.get_cache_path(self.current_url))

    # Step 4: refuse to clobber, then copy the *cached source* (not the
    # rendered view) so Offpunk can navigate to the saved file later.
    if os.path.exists(filename):
        print("File %s already exists!" % filename)
    else:
        path = netcache.get_cache_path(url)
        if os.path.isdir(path):
            print("Can’t save %s because it’s a folder, not a file"%path)
        else:
            print("Saved to %s" % filename)
            shutil.copyfile(path, filename)

    # Step 5: restore the previous page if we navigated away.
    if index != None:
        self._go_to_url(last_url, handle=False)
|
|
|
|
|
@needs_gi
def do_url(self, *args):
    """Print URL of most recently visited item."""
    # Strip any rendering-mode suffix before printing.
    url, mode = unmode_url(self.current_url)
    print(url)
|
|
|
|
|
### Bookmarking stuff
|
|
|
|
|
@needs_gi
def do_add(self, line):
    """Add the current URL to the list specified as argument.
    If no argument given, URL is added to Bookmarks."""
    args = line.split()
    if len(args) < 1:
        # Default destination, created on first use.
        target = "bookmarks"
        if not self.list_path(target):
            self.list_create(target)
        self.list_add_line(target)
    else:
        self.list_add_line(args[0])
|
2022-01-23 16:09:05 +00:00
|
|
|
|
# Get the list file name, creating or migrating it if needed.
# Migrate bookmarks/tour/to_fetch from XDG_CONFIG to XDG_DATA
# We migrate only if the file exists in XDG_CONFIG and not XDG_DATA
def get_list(self, list):
    list_path = self.list_path(list)
    if not list_path:
        legacy_gmi = os.path.join(xdg("config"), list + ".gmi")
        legacy_plain = os.path.join(xdg("config"), list)
        target = os.path.join(xdg("data"), "lists")
        if os.path.exists(legacy_gmi):
            # Old-style list with extension: move it as-is.
            shutil.move(legacy_gmi, target)
        elif os.path.exists(legacy_plain):
            # Old-style list without extension: add .gmi while moving.
            shutil.move(legacy_plain, os.path.join(target, list + ".gmi"))
        else:
            # Nothing to migrate: create a fresh list, with a canned
            # title for the special system lists.
            if list == "subscribed":
                title = "Subscriptions #subscribed (new links in those pages will be added to tour)"
            elif list == "to_fetch":
                title = "Links requested and to be fetched during the next --sync"
            else:
                title = None
            self.list_create(list, title=title, quite=True)
        list_path = self.list_path(list)
    return list_path
|
2022-12-12 14:30:49 +00:00
|
|
|
|
@needs_gi
|
2022-01-23 22:14:06 +00:00
|
|
|
|
def do_subscribe(self,line):
|
|
|
|
|
"""Subscribe to current page by saving it in the "subscribed" list.
|
|
|
|
|
If a new link is found in the page during a --sync, the new link is automatically
|
|
|
|
|
fetched and added to your next tour.
|
|
|
|
|
To unsubscribe, remove the page from the "subscribed" list."""
|
2023-08-03 14:54:29 +00:00
|
|
|
|
subs = self.get_renderer().get_subscribe_links()
|
2022-02-18 13:11:09 +00:00
|
|
|
|
if len(subs) > 1:
|
2022-02-27 21:20:42 +00:00
|
|
|
|
stri = "Multiple feeds have been found :\n"
|
2022-03-13 15:01:12 +00:00
|
|
|
|
elif "rss" in subs[0][1] or "atom" in subs[0][1] :
|
|
|
|
|
stri = "This page is already a feed:\n"
|
2022-02-27 21:20:42 +00:00
|
|
|
|
else:
|
|
|
|
|
stri = "No feed detected. You can still watch the page :\n"
|
|
|
|
|
counter = 0
|
|
|
|
|
for l in subs:
|
2022-03-09 10:34:41 +00:00
|
|
|
|
link = l[0]
|
2022-02-27 21:32:25 +00:00
|
|
|
|
already = []
|
|
|
|
|
for li in self.list_lists():
|
|
|
|
|
if self.list_is_subscribed(li):
|
|
|
|
|
if self.list_has_url(link,li):
|
|
|
|
|
already.append(li)
|
2022-02-27 21:20:42 +00:00
|
|
|
|
stri += "[%s] %s [%s]\n"%(counter+1,link,l[1])
|
2022-02-27 21:32:25 +00:00
|
|
|
|
if len(already) > 0:
|
|
|
|
|
stri += "\t -> (already subscribed through lists %s)\n"%(str(already))
|
2022-02-27 21:20:42 +00:00
|
|
|
|
counter += 1
|
|
|
|
|
stri += "\n"
|
2022-07-18 12:53:49 +00:00
|
|
|
|
stri += "Which feed do you want to subscribe ? > "
|
2022-02-27 21:20:42 +00:00
|
|
|
|
ans = input(stri)
|
|
|
|
|
if ans.isdigit() and 0 < int(ans) <= len(subs):
|
|
|
|
|
sublink,mime,title = subs[int(ans)-1]
|
2022-01-24 14:33:33 +00:00
|
|
|
|
else:
|
2022-02-27 21:20:42 +00:00
|
|
|
|
sublink,title = None,None
|
2022-02-18 13:11:09 +00:00
|
|
|
|
if sublink:
|
|
|
|
|
list_path = self.get_list("subscribed")
|
2023-08-03 21:17:12 +00:00
|
|
|
|
added = self.list_add_line("subscribed",url=sublink,verbose=False)
|
2022-02-18 13:11:09 +00:00
|
|
|
|
if added :
|
|
|
|
|
print("Subscribed to %s" %sublink)
|
|
|
|
|
else:
|
|
|
|
|
print("You are already subscribed to %s"%sublink)
|
2022-02-27 21:20:42 +00:00
|
|
|
|
else:
|
|
|
|
|
print("No subscription registered")
|
2019-06-22 12:58:21 +00:00
|
|
|
|
|
2020-03-24 19:41:37 +00:00
|
|
|
|
def do_bookmarks(self, line):
|
|
|
|
|
"""Show or access the bookmarks menu.
|
|
|
|
|
'bookmarks' shows all bookmarks.
|
|
|
|
|
'bookmarks n' navigates immediately to item n in the bookmark menu.
|
2020-05-10 20:51:33 +00:00
|
|
|
|
Bookmarks are stored using the 'add' command."""
|
2022-01-23 16:09:05 +00:00
|
|
|
|
list_path = self.get_list("bookmarks")
|
2020-03-24 19:41:37 +00:00
|
|
|
|
args = line.strip()
|
|
|
|
|
if len(args.split()) > 1 or (args and not args.isnumeric()):
|
|
|
|
|
print("bookmarks command takes a single integer argument!")
|
2022-01-23 12:52:34 +00:00
|
|
|
|
elif args:
|
|
|
|
|
self.list_go_to_line(args,"bookmarks")
|
|
|
|
|
else:
|
|
|
|
|
self.list_show("bookmarks")
|
2023-03-08 22:11:32 +00:00
|
|
|
|
|
2023-03-12 06:47:38 +00:00
|
|
|
|
@needs_gi
|
2022-01-26 17:52:38 +00:00
|
|
|
|
def do_archive(self,args):
|
|
|
|
|
"""Archive current page by removing it from every list and adding it to
|
|
|
|
|
archives, which is a special historical list limited in size. It is similar to `move archives`."""
|
|
|
|
|
for li in self.list_lists():
|
|
|
|
|
if li not in ["archives", "history"]:
|
2023-08-13 21:19:55 +00:00
|
|
|
|
u,m = unmode_url(self.current_url)
|
|
|
|
|
deleted = self.list_rm_url(u,li)
|
2022-01-26 17:52:38 +00:00
|
|
|
|
if deleted:
|
|
|
|
|
print("Removed from %s"%li)
|
|
|
|
|
self.list_add_top("archives",limit=self.options["archives_size"])
|
2023-07-30 21:35:34 +00:00
|
|
|
|
print("Archiving: %s"%self.get_renderer().get_page_title())
|
2022-02-04 23:26:51 +00:00
|
|
|
|
print("\x1b[2;34mCurrent maximum size of archives : %s\x1b[0m" %self.options["archives_size"])
|
2022-01-26 17:52:38 +00:00
|
|
|
|
|
2023-08-03 14:54:29 +00:00
|
|
|
|
#what is the line to add to a list for this url ?
|
2023-08-12 22:07:07 +00:00
|
|
|
|
def to_map_line(self,url=None):
|
|
|
|
|
if not url:
|
|
|
|
|
url = self.current_url
|
|
|
|
|
r = self.get_renderer(url)
|
2023-08-12 10:50:39 +00:00
|
|
|
|
if r:
|
2023-08-13 10:29:32 +00:00
|
|
|
|
title = r.get_page_title()
|
2023-08-12 10:50:39 +00:00
|
|
|
|
else:
|
2023-08-13 10:29:32 +00:00
|
|
|
|
title = ""
|
2023-08-25 12:14:06 +00:00
|
|
|
|
toreturn = "=> {} {}\n".format(url,title)
|
|
|
|
|
return toreturn
|
2023-08-03 14:54:29 +00:00
|
|
|
|
|
2023-08-03 21:17:12 +00:00
|
|
|
|
    def list_add_line(self,list,url=None,verbose=True):
        """Append `url` (default: current URL) to the given list.

        Returns True when the URL was actually appended, False when the list
        does not exist or the URL was already present. When the URL is present
        without its access mode, only the mode is updated (no new line).
        System lists are silently created on first use.
        """
        list_path = self.list_path(list)
        # System lists (history, tour, …) are auto-created, quietly.
        if not list_path and self.list_is_system(list):
            self.list_create(list,quite=True)
            list_path = self.list_path(list)
        if not list_path:
            print("List %s does not exist. Create it with ""list create %s"""%(list,list))
            return False
        else:
            if not url:
                url = self.current_url
            # Split the URL from its display mode ("url mode" handling).
            unmoded_url,mode = unmode_url(url)
            # first we check if url already exists in the file
            if self.list_has_url(url,list,exact_mode=True):
                if verbose:
                    print("%s already in %s."%(url,list))
                return False
            # If the URL already exists but without a mode, we update the mode
            # FIXME: this doesn’t take into account the case where you want to remove the mode
            elif url != unmoded_url and self.list_has_url(unmoded_url,list):
                self.list_update_url_mode(unmoded_url,list,mode)
                if verbose:
                    print("%s has updated mode in %s to %s"%(url,list,mode))
                # NOTE(review): no explicit return here, so this branch
                # returns None (falsy) — callers treating it as "not added"
                # still behave consistently.
            else:
                with open(list_path,"a") as l_file:
                    l_file.write(self.to_map_line(url))
                    l_file.close()
                if verbose:
                    print("%s added to %s" %(url,list))
                return True
|
2023-03-12 06:47:38 +00:00
|
|
|
|
|
2023-08-03 21:17:12 +00:00
|
|
|
|
    @needs_gi
    def list_add_top(self,list,limit=0,truncate_lines=0):
        """Prepend the current URL to `list`, annotated with a timestamp.

        limit          : maximum number of non-comment lines kept (0 = no limit).
        truncate_lines : number of existing entries dropped from the top before
                         re-writing (used when branching history after going back).
        The whole file is rewritten: a "#<list>" header, the new entry, then
        the surviving previous entries.
        """
        stri = self.to_map_line().strip("\n")
        # Human-readable annotation depends on which list we prepend to.
        if list == "archives":
            stri += ", archived on "
        elif list == "history":
            stri += ", visited on "
        else:
            stri += ", added to %s on "%list
        stri += time.ctime() + "\n"
        list_path = self.get_list(list)
        with open(list_path,"r") as l_file:
            lines = l_file.readlines()
            l_file.close()
        with open(list_path,"w") as l_file:
            l_file.write("#%s\n"%list)
            l_file.write(stri)
            counter = 0
            # Truncating is useful in case we open a new branch
            # after a few back in history
            to_truncate = truncate_lines
            for l in lines:
                # Comment/header lines are never copied back nor counted.
                if not l.startswith("#"):
                    if to_truncate > 0:
                        to_truncate -= 1
                    elif limit == 0 or counter < limit:
                        l_file.write(l)
                        counter += 1
            l_file.close()
|
|
|
|
|
|
2022-01-17 13:32:46 +00:00
|
|
|
|
|
2022-01-22 14:08:06 +00:00
|
|
|
|
# remove an url from a list.
|
|
|
|
|
# return True if the URL was removed
|
|
|
|
|
# return False if the URL was not found
|
|
|
|
|
def list_rm_url(self,url,list):
|
2022-02-19 20:16:47 +00:00
|
|
|
|
return self.list_has_url(url,list,deletion=True)
|
2023-03-12 06:47:38 +00:00
|
|
|
|
|
2024-01-05 23:27:51 +00:00
|
|
|
|
def list_update_url_mode(self,url,list,mode):
|
|
|
|
|
return self.list_has_url(url,list,update_mode = mode)
|
|
|
|
|
|
2022-02-19 20:16:47 +00:00
|
|
|
|
    # deletion and has_url are so similar, I made them the same method
    # deletion : true or false if you want to delete the URL
    # exact_mode : True if you want to check only for the exact url, not the canonical one
    # update_mode : a new mode to update the URL
    def list_has_url(self,url,list,deletion=False, exact_mode=False, update_mode = None):
        """Check whether `url` appears in `list`; optionally delete or re-mode it.

        Returns True when the URL was found (and, depending on flags, removed
        or updated), False when absent or when the list does not exist.
        When `deletion` or `update_mode` is set, the list file is rewritten.
        """
        list_path = self.list_path(list)
        if list_path:
            to_return = False
            with open(list_path,"r") as lf:
                lines = lf.readlines()
                lf.close()
            # Lines that will survive the rewrite (only used on mutation).
            to_write = []
            # let’s remove the mode
            if not exact_mode:
                url=unmode_url(url)[0]
            for l in lines:
                # we separate components of the line
                # to ensure we identify a complete URL, not a part of it
                splitted = l.split()
                if url not in splitted and len(splitted) > 1:
                    # Compare against the canonical (un-moded) stored URL.
                    current = unmode_url(splitted[1])[0]
                    #sometimes, we must remove the ending "/"
                    if url == current or (url.endswith("/") and url[:-1] == current):
                        to_return = True
                        if update_mode:
                            new_line = l.replace(current,mode_url(url,update_mode))
                            to_write.append(new_line)
                        elif not deletion:
                            to_write.append(l)
                        # On deletion the line is simply not copied over.
                    else:
                        to_write.append(l)
                elif url in splitted:
                    to_return = True
                    # We update the mode if asked by replacing the old url
                    # by a moded one in the same line
                    if update_mode:
                        new_line = l.replace(url,mode_url(url,update_mode))
                        to_write.append(new_line)
                    elif not deletion:
                        to_write.append(l)
                else:
                    to_write.append(l)
            # Only touch the file when a mutation was requested.
            if deletion or update_mode:
                with open(list_path,"w") as lf:
                    for l in to_write:
                        lf.write(l)
                    lf.close()
            return to_return
        else:
            return False
|
2022-01-17 13:32:46 +00:00
|
|
|
|
|
2022-01-23 22:14:06 +00:00
|
|
|
|
def list_get_links(self,list):
|
2022-01-23 16:09:05 +00:00
|
|
|
|
list_path = self.list_path(list)
|
2023-09-07 14:38:48 +00:00
|
|
|
|
if list_path and os.path.exists(list_path):
|
2023-07-31 11:19:28 +00:00
|
|
|
|
return self.get_renderer("list:///%s"%list).get_links()
|
2022-01-23 22:14:06 +00:00
|
|
|
|
else:
|
|
|
|
|
return []
|
2022-01-23 16:09:05 +00:00
|
|
|
|
|
2022-01-18 21:19:43 +00:00
|
|
|
|
def list_go_to_line(self,line,list):
|
2022-01-22 14:08:06 +00:00
|
|
|
|
list_path = self.list_path(list)
|
|
|
|
|
if not list_path:
|
2022-01-18 21:19:43 +00:00
|
|
|
|
print("List %s does not exist. Create it with ""list create %s"""%(list,list))
|
|
|
|
|
elif not line.isnumeric():
|
|
|
|
|
print("go_to_line requires a number as parameter")
|
|
|
|
|
else:
|
2023-07-30 21:35:34 +00:00
|
|
|
|
r = self.get_renderer("list:///%s"%list)
|
2023-08-13 10:29:32 +00:00
|
|
|
|
url = r.get_link(int(line))
|
2022-03-07 14:36:16 +00:00
|
|
|
|
display = not self.sync_only
|
2023-07-30 21:35:34 +00:00
|
|
|
|
if url:
|
|
|
|
|
self._go_to_url(url,handle=display)
|
2023-08-13 10:29:32 +00:00
|
|
|
|
return url
|
2022-01-18 21:19:43 +00:00
|
|
|
|
|
2022-01-17 13:32:46 +00:00
|
|
|
|
def list_show(self,list):
|
2022-01-22 14:08:06 +00:00
|
|
|
|
list_path = self.list_path(list)
|
|
|
|
|
if not list_path:
|
2022-01-17 13:32:46 +00:00
|
|
|
|
print("List %s does not exist. Create it with ""list create %s"""%(list,list))
|
|
|
|
|
else:
|
2023-08-03 21:17:12 +00:00
|
|
|
|
url = "list:///%s"%list
|
2023-03-12 06:47:38 +00:00
|
|
|
|
display = not self.sync_only
|
2023-08-03 21:17:12 +00:00
|
|
|
|
self._go_to_url(url,handle=display)
|
2019-06-22 12:58:21 +00:00
|
|
|
|
|
2022-01-22 14:08:06 +00:00
|
|
|
|
#return the path of the list file if list exists.
|
|
|
|
|
#return None if the list doesn’t exist.
|
|
|
|
|
def list_path(self,list):
|
2023-12-04 10:10:20 +00:00
|
|
|
|
listdir = os.path.join(xdg("data"),"lists")
|
2022-01-17 13:32:46 +00:00
|
|
|
|
list_path = os.path.join(listdir, "%s.gmi"%list)
|
2022-01-22 14:08:06 +00:00
|
|
|
|
if os.path.exists(list_path):
|
|
|
|
|
return list_path
|
|
|
|
|
else:
|
|
|
|
|
return None
|
|
|
|
|
|
2022-11-05 22:14:41 +00:00
|
|
|
|
def list_create(self,list,title=None,quite=False):
|
2022-01-22 14:08:06 +00:00
|
|
|
|
list_path = self.list_path(list)
|
2022-02-15 15:50:33 +00:00
|
|
|
|
if list in ["create","edit","delete","help"]:
|
2022-01-22 14:08:06 +00:00
|
|
|
|
print("%s is not allowed as a name for a list"%list)
|
|
|
|
|
elif not list_path:
|
2023-12-04 10:10:20 +00:00
|
|
|
|
listdir = os.path.join(xdg("data"),"lists")
|
2022-01-22 14:08:06 +00:00
|
|
|
|
os.makedirs(listdir,exist_ok=True)
|
|
|
|
|
list_path = os.path.join(listdir, "%s.gmi"%list)
|
2022-01-21 16:42:14 +00:00
|
|
|
|
with open(list_path,"a") as lfile:
|
2022-01-17 13:32:46 +00:00
|
|
|
|
if title:
|
2022-01-21 16:42:14 +00:00
|
|
|
|
lfile.write("# %s\n"%title)
|
2022-01-17 13:32:46 +00:00
|
|
|
|
else:
|
2022-01-21 16:42:14 +00:00
|
|
|
|
lfile.write("# %s\n"%list)
|
2022-01-17 13:32:46 +00:00
|
|
|
|
lfile.close()
|
2022-11-05 22:14:41 +00:00
|
|
|
|
if not quite:
|
|
|
|
|
print("list created. Display with `list %s`"%list)
|
2022-01-21 16:42:14 +00:00
|
|
|
|
else:
|
|
|
|
|
print("list %s already exists" %list)
|
2023-03-12 06:47:38 +00:00
|
|
|
|
|
2022-01-22 14:08:06 +00:00
|
|
|
|
def do_move(self,arg):
|
|
|
|
|
"""move LIST will add the current page to the list LIST.
|
2023-03-12 06:47:38 +00:00
|
|
|
|
With a major twist: current page will be removed from all other lists.
|
2022-01-22 14:08:06 +00:00
|
|
|
|
If current page was not in a list, this command is similar to `add LIST`."""
|
|
|
|
|
if not arg:
|
|
|
|
|
print("LIST argument is required as the target for your move")
|
2022-01-26 17:52:38 +00:00
|
|
|
|
elif arg[0] == "archives":
|
|
|
|
|
self.do_archive()
|
2022-01-22 14:08:06 +00:00
|
|
|
|
else:
|
|
|
|
|
args = arg.split()
|
|
|
|
|
list_path = self.list_path(args[0])
|
|
|
|
|
if not list_path:
|
|
|
|
|
print("%s is not a list, aborting the move" %args[0])
|
|
|
|
|
else:
|
2022-01-26 17:52:38 +00:00
|
|
|
|
lists = self.list_lists()
|
2022-01-22 14:08:06 +00:00
|
|
|
|
for l in lists:
|
2022-01-26 17:52:38 +00:00
|
|
|
|
if l != args[0] and l not in ["archives", "history"]:
|
2023-08-13 21:19:55 +00:00
|
|
|
|
url, mode = unmode_url(self.current_url)
|
|
|
|
|
isremoved = self.list_rm_url(url,l)
|
2022-01-22 14:08:06 +00:00
|
|
|
|
if isremoved:
|
2022-01-26 17:52:38 +00:00
|
|
|
|
print("Removed from %s"%l)
|
2022-01-22 14:08:06 +00:00
|
|
|
|
self.list_add_line(args[0])
|
2023-03-12 06:47:38 +00:00
|
|
|
|
|
2022-01-23 22:14:06 +00:00
|
|
|
|
def list_lists(self):
|
2023-12-04 10:10:20 +00:00
|
|
|
|
listdir = os.path.join(xdg("data"),"lists")
|
2022-01-23 22:14:06 +00:00
|
|
|
|
to_return = []
|
|
|
|
|
if os.path.exists(listdir):
|
|
|
|
|
lists = os.listdir(listdir)
|
|
|
|
|
if len(lists) > 0:
|
|
|
|
|
for l in lists:
|
|
|
|
|
#removing the .gmi at the end of the name
|
|
|
|
|
to_return.append(l[:-4])
|
|
|
|
|
return to_return
|
2023-03-12 06:47:38 +00:00
|
|
|
|
|
2022-02-15 13:56:35 +00:00
|
|
|
|
def list_has_status(self,list,status):
|
|
|
|
|
path = self.list_path(list)
|
|
|
|
|
toreturn = False
|
|
|
|
|
if path:
|
|
|
|
|
with open(path) as f:
|
|
|
|
|
line = f.readline().strip()
|
|
|
|
|
f.close()
|
|
|
|
|
if line.startswith("#") and status in line:
|
|
|
|
|
toreturn = True
|
|
|
|
|
return toreturn
|
|
|
|
|
|
|
|
|
|
    def list_is_subscribed(self,list):
        """True when `list` carries the "#subscribed" status marker."""
        return self.list_has_status(list,"#subscribed")
|
2022-02-15 13:56:35 +00:00
|
|
|
|
    def list_is_frozen(self,list):
        """True when `list` carries the "#frozen" status marker."""
        return self.list_has_status(list,"#frozen")
|
2022-02-15 13:56:35 +00:00
|
|
|
|
def list_is_system(self,list):
|
|
|
|
|
return list in ["history","to_fetch","archives","tour"]
|
|
|
|
|
|
|
|
|
|
    # This modify the status of a list to one of :
    # normal, frozen, subscribed
    # action is either #frozen, #subscribed or None
    def list_modify(self,list,action=None):
        """Set the status of `list` by rewriting its first header line.

        Any existing "#subscribed"/"#frozen" marker is stripped first, then
        `action` (a marker string, or None for a normal list) is appended.
        """
        path = self.list_path(list)
        with open(path) as f:
            lines = f.readlines()
            f.close()
        # Reuse the existing header, or synthesize one when missing.
        if lines[0].strip().startswith("#"):
            first_line = lines.pop(0).strip("\n")
        else:
            first_line = "# %s "%list
        # Remove previous markers before applying the new one.
        first_line = first_line.replace("#subscribed","").replace("#frozen","")
        if action:
            first_line += " " + action
            print("List %s has been marked as %s"%(list,action))
        else:
            print("List %s is now a normal list" %list)
        first_line += "\n"
        lines.insert(0,first_line)
        with open(path,"w") as f:
            for line in lines:
                f.write(line)
            f.close()
|
2022-01-21 16:42:14 +00:00
|
|
|
|
    def do_list(self,arg):
        """Manage list of bookmarked pages.
        - list : display available lists
        - list $LIST : display pages in $LIST
        - list create $NEWLIST : create a new list
        - list edit $LIST : edit the list
        - list subscribe $LIST : during sync, add new links found in listed pages to tour
        - list freeze $LIST : don’t update pages in list during sync if a cache already exists
        - list normal $LIST : update pages in list during sync but don’t add anything to tour
        - list delete $LIST : delete a list permanently (a confirmation is required)
        - list help : print this help
        See also :
        - add $LIST (to add current page to $LIST or, by default, to bookmarks)
        - move $LIST (to add current page to list while removing from all others)
        - archive (to remove current page from all lists while adding to archives)

        There’s no "delete" on purpose. The use of "archive" is recommended.

        The following lists cannot be removed or frozen but can be edited with "list edit"
        - list archives : contains last 200 archived URLs
        - history : contains last 200 visited URLs
        - to_fetch : contains URLs that will be fetched during the next sync
        - tour : contains the next URLs to visit during a tour (see "help tour")

        """
        listdir = os.path.join(xdg("data"),"lists")
        os.makedirs(listdir,exist_ok=True)
        if not arg:
            # Bare "list": show the overview of all lists.
            lists = self.list_lists()
            if len(lists) > 0:
                lurl = "list:///"
                self._go_to_url(lurl)
            else:
                print("No lists yet. Use `list create`")
        else:
            args = arg.split()
            if args[0] == "create":
                # Extra words after the name become the list's title.
                if len(args) > 2:
                    name = " ".join(args[2:])
                    self.list_create(args[1].lower(),title=name)
                elif len(args) == 2:
                    self.list_create(args[1].lower())
                else:
                    print("A name is required to create a new list. Use `list create NAME`")
            elif args[0] == "edit":
                # Editor resolution: explicit option, then $VISUAL, then $EDITOR.
                editor = None
                if "editor" in self.options and self.options["editor"]:
                    editor = self.options["editor"]
                elif os.environ.get("VISUAL"):
                    editor = os.environ.get("VISUAL")
                elif os.environ.get("EDITOR"):
                    editor = os.environ.get("EDITOR")
                if editor:
                    if len(args) > 1 and args[1] in self.list_lists():
                        path = os.path.join(listdir,args[1]+".gmi")
                        try:
                            # Note that we intentionally don't quote the editor.
                            # In the unlikely case `editor` includes a percent
                            # sign, we also escape it for the %-formatting.
                            cmd = editor.replace("%", "%%") + " %s"
                            run(cmd, parameter=path, direct_output=True)
                        except Exception as err:
                            print(err)
                            print("Please set a valid editor with \"set editor\"")
                    else:
                        print("A valid list name is required to edit a list")
                else:
                    print("No valid editor has been found.")
                    print("You can use the following command to set your favourite editor:")
                    print("set editor EDITOR")
                    print("or use the $VISUAL or $EDITOR environment variables.")
            elif args[0] == "delete":
                if len(args) > 1:
                    if self.list_is_system(args[1]):
                        print("%s is a system list which cannot be deleted"%args[1])
                    elif args[1] in self.list_lists():
                        # Non-empty lists require a stronger confirmation string.
                        size = len(self.list_get_links(args[1]))
                        stri = "Are you sure you want to delete %s ?\n"%args[1]
                        confirm = "YES"
                        if size > 0:
                            stri += "! %s items in the list will be lost !\n"%size
                            confirm = "YES DELETE %s" %size
                        else :
                            stri += "The list is empty, it should be safe to delete it.\n"
                        stri += "Type \"%s\" (in capital, without quotes) to confirm :"%confirm
                        answer = input(stri)
                        if answer == confirm:
                            path = os.path.join(listdir,args[1]+".gmi")
                            os.remove(path)
                            print("* * * %s has been deleted" %args[1])
                    else:
                        print("A valid list name is required to be deleted")
                else:
                    print("A valid list name is required to be deleted")
            elif args[0] in ["subscribe","freeze","normal"]:
                # Status changes: map the sub-command to a header marker.
                if len(args) > 1:
                    if self.list_is_system(args[1]):
                        print("You cannot modify %s which is a system list"%args[1])
                    elif args[1] in self.list_lists():
                        if args[0] == "subscribe":
                            action = "#subscribed"
                        elif args[0] == "freeze":
                            action = "#frozen"
                        else:
                            action = None
                        self.list_modify(args[1],action=action)
                    else:
                        print("A valid list name is required after %s" %args[0])
            elif args[0] == "help":
                self.onecmd("help list")
            elif len(args) == 1:
                # "list NAME": display that list.
                self.list_show(args[0].lower())
            else:
                # "list NAME N": jump to entry N of that list.
                self.list_go_to_line(args[1],args[0].lower())
|
2022-01-21 16:42:14 +00:00
|
|
|
|
|
2019-06-22 12:58:21 +00:00
|
|
|
|
def do_help(self, arg):
|
|
|
|
|
"""ALARM! Recursion detected! ALARM! Prepare to eject!"""
|
|
|
|
|
if arg == "!":
|
|
|
|
|
print("! is an alias for 'shell'")
|
|
|
|
|
elif arg == "?":
|
|
|
|
|
print("? is an alias for 'help'")
|
2022-01-05 20:12:59 +00:00
|
|
|
|
elif arg in _ABBREVS:
|
|
|
|
|
full_cmd = _ABBREVS[arg]
|
2022-03-07 22:16:17 +00:00
|
|
|
|
print("%s is an alias for '%s'" %(arg,full_cmd))
|
2022-01-05 20:12:59 +00:00
|
|
|
|
print("See the list of aliases with 'abbrevs'")
|
|
|
|
|
print("'help %s':"%full_cmd)
|
|
|
|
|
cmd.Cmd.do_help(self, full_cmd)
|
2019-06-22 12:58:21 +00:00
|
|
|
|
else:
|
|
|
|
|
cmd.Cmd.do_help(self, arg)
|
|
|
|
|
|
2022-02-18 21:55:32 +00:00
|
|
|
|
def do_sync(self, line):
|
2023-07-02 21:46:34 +00:00
|
|
|
|
"""Synchronize all bookmarks lists and URLs from the to_fetch list.
|
2022-02-18 21:55:32 +00:00
|
|
|
|
- New elements in pages in subscribed lists will be added to tour
|
|
|
|
|
- Elements in list to_fetch will be retrieved and added to tour
|
|
|
|
|
- Normal lists will be synchronized and updated
|
|
|
|
|
- Frozen lists will be fetched only if not present.
|
|
|
|
|
|
2023-07-02 21:46:34 +00:00
|
|
|
|
Before a sync, you can edit the list of URLs that will be fetched with the
|
|
|
|
|
following command: "list edit to_fetch"
|
|
|
|
|
|
2022-02-18 21:55:32 +00:00
|
|
|
|
Argument : duration of cache validity (in seconds)."""
|
|
|
|
|
if self.offline_only:
|
|
|
|
|
print("Sync can only be achieved online. Change status with `online`.")
|
|
|
|
|
return
|
|
|
|
|
args = line.split()
|
|
|
|
|
if len(args) > 0:
|
|
|
|
|
if not args[0].isdigit():
|
|
|
|
|
print("sync argument should be the cache validity expressed in seconds")
|
|
|
|
|
return
|
|
|
|
|
else:
|
|
|
|
|
validity = int(args[0])
|
|
|
|
|
else:
|
|
|
|
|
validity = 0
|
|
|
|
|
self.call_sync(refresh_time=validity)
|
|
|
|
|
|
2023-09-07 14:38:48 +00:00
|
|
|
|
def call_sync(self,refresh_time=0,depth=1,lists=None):
|
2023-08-03 21:17:12 +00:00
|
|
|
|
# fetch_url is the core of the sync algorithm.
|
2022-02-18 21:55:32 +00:00
|
|
|
|
# It takes as input :
|
2023-08-03 21:17:12 +00:00
|
|
|
|
# - an URL to be fetched
|
2022-02-18 21:55:32 +00:00
|
|
|
|
# - depth : the degree of recursion to build the cache (0 means no recursion)
|
|
|
|
|
# - validity : the age, in seconds, existing caches need to have before
|
|
|
|
|
# being refreshed (0 = never refreshed if it already exists)
|
|
|
|
|
# - savetotour : if True, newly cached items are added to tour
|
2023-08-03 21:17:12 +00:00
|
|
|
|
def add_to_tour(url):
|
|
|
|
|
if url and netcache.is_cache_valid(url):
|
|
|
|
|
toprint = " -> adding to tour: %s" %url
|
2022-03-19 14:58:28 +00:00
|
|
|
|
width = term_width() - 1
|
|
|
|
|
toprint = toprint[:width]
|
|
|
|
|
toprint += " "*(width-len(toprint))
|
|
|
|
|
print(toprint)
|
2023-08-03 21:17:12 +00:00
|
|
|
|
self.list_add_line("tour",url=url,verbose=False)
|
2022-02-18 21:55:32 +00:00
|
|
|
|
return True
|
|
|
|
|
else:
|
|
|
|
|
return False
|
2023-08-03 21:17:12 +00:00
|
|
|
|
def fetch_url(url,depth=0,validity=0,savetotour=False,count=[0,0],strin=""):
|
2022-02-18 21:55:32 +00:00
|
|
|
|
#savetotour = True will save to tour newly cached content
|
|
|
|
|
# else, do not save to tour
|
|
|
|
|
#regardless of valitidy
|
2023-08-03 21:17:12 +00:00
|
|
|
|
if not url: return
|
|
|
|
|
if not netcache.is_cache_valid(url,validity=validity):
|
2022-02-18 21:55:32 +00:00
|
|
|
|
if strin != "":
|
|
|
|
|
endline = '\r'
|
|
|
|
|
else:
|
|
|
|
|
endline = None
|
|
|
|
|
#Did we already had a cache (even an old one) ?
|
2023-08-03 21:17:12 +00:00
|
|
|
|
isnew = not netcache.is_cache_valid(url)
|
|
|
|
|
toprint = "%s [%s/%s] Fetch "%(strin,count[0],count[1]) + url
|
2022-03-19 14:58:28 +00:00
|
|
|
|
width = term_width() - 1
|
|
|
|
|
toprint = toprint[:width]
|
|
|
|
|
toprint += " "*(width-len(toprint))
|
|
|
|
|
print(toprint,end=endline)
|
2022-03-03 15:16:32 +00:00
|
|
|
|
#If not saving to tour, then we should limit download size
|
|
|
|
|
limit = not savetotour
|
2023-08-03 21:17:12 +00:00
|
|
|
|
self._go_to_url(url,update_hist=False,limit_size=limit)
|
|
|
|
|
if savetotour and isnew and netcache.is_cache_valid(url):
|
2023-03-12 06:47:38 +00:00
|
|
|
|
#we add to the next tour only if we managed to cache
|
2022-02-18 21:55:32 +00:00
|
|
|
|
#the ressource
|
2023-08-03 21:17:12 +00:00
|
|
|
|
add_to_tour(url)
|
2022-02-18 21:55:32 +00:00
|
|
|
|
#Now, recursive call, even if we didn’t refresh the cache
|
2023-03-12 06:47:38 +00:00
|
|
|
|
# This recursive call is impacting performances a lot but is needed
|
2022-10-20 20:53:33 +00:00
|
|
|
|
# For the case when you add a address to a list to read later
|
|
|
|
|
# You then expect the links to be loaded during next refresh, even
|
|
|
|
|
# if the link itself is fresh enough
|
|
|
|
|
# see fetch_list()
|
2022-02-18 21:55:32 +00:00
|
|
|
|
if depth > 0:
|
2022-03-03 15:16:32 +00:00
|
|
|
|
#we should only savetotour at the first level of recursion
|
|
|
|
|
# The code for this was removed so, currently, we savetotour
|
|
|
|
|
# at every level of recursion.
|
2023-08-14 10:23:09 +00:00
|
|
|
|
r = self.get_renderer(url)
|
2023-08-30 09:54:24 +00:00
|
|
|
|
url,oldmode = unmode_url(url)
|
|
|
|
|
if oldmode == "full":
|
|
|
|
|
mode = "full_links_only"
|
|
|
|
|
else:
|
|
|
|
|
mode = "links_only"
|
2023-08-14 10:23:09 +00:00
|
|
|
|
if r:
|
2023-08-30 09:54:24 +00:00
|
|
|
|
links = r.get_links(mode=mode)
|
2023-08-14 10:23:09 +00:00
|
|
|
|
subcount = [0,len(links)]
|
|
|
|
|
d = depth - 1
|
|
|
|
|
for k in links:
|
|
|
|
|
#recursive call (validity is always 0 in recursion)
|
|
|
|
|
substri = strin + " -->"
|
|
|
|
|
subcount[0] += 1
|
|
|
|
|
fetch_url(k,depth=d,validity=0,savetotour=savetotour,\
|
|
|
|
|
count=subcount,strin=substri)
|
2022-02-18 21:55:32 +00:00
|
|
|
|
def fetch_list(list,validity=0,depth=1,tourandremove=False,tourchildren=False):
    """Fetch every URL contained in a list, with a progress banner.

    validity      : cache-validity (seconds) forwarded to fetch_url (0 = always valid)
    depth         : recursion depth forwarded to fetch_url
    tourandremove : if True, each successfully toured URL is removed from the list
    tourchildren  : if True, newly cached children are queued on the tour
    """
    urls = self.list_get_links(list)
    total = len(urls)
    print(" * * * %s to fetch in %s * * *" %(total,list))
    for position, link in enumerate(urls, start=1):
        # If cache for a link is newer than the list
        fetch_url(link,depth=depth,validity=validity,savetotour=tourchildren,count=[position,total])
        # Optionally move the item from the list to the tour once handled.
        if tourandremove and add_to_tour(link):
            self.list_rm_url(link,list)
|
2023-03-12 06:47:38 +00:00
|
|
|
|
|
2022-02-18 21:55:32 +00:00
|
|
|
|
self.sync_only = True
|
2023-09-07 14:38:48 +00:00
|
|
|
|
if not lists:
|
|
|
|
|
lists = self.list_lists()
|
2022-02-18 21:55:32 +00:00
|
|
|
|
# We will fetch all the lists except "archives" and "history"
|
|
|
|
|
# We keep tour for the last round
|
|
|
|
|
subscriptions = []
|
|
|
|
|
normal_lists = []
|
|
|
|
|
fridge = []
|
|
|
|
|
for l in lists:
|
2023-09-07 14:38:48 +00:00
|
|
|
|
#only try existing lists
|
|
|
|
|
if l in self.list_lists():
|
|
|
|
|
if not self.list_is_system(l):
|
|
|
|
|
if self.list_is_frozen(l):
|
|
|
|
|
fridge.append(l)
|
|
|
|
|
elif self.list_is_subscribed(l):
|
|
|
|
|
subscriptions.append(l)
|
|
|
|
|
else:
|
|
|
|
|
normal_lists.append(l)
|
2022-02-18 21:55:32 +00:00
|
|
|
|
# We start with the "subscribed" as we need to find new items
|
2022-03-04 11:38:41 +00:00
|
|
|
|
starttime = int(time.time())
|
2022-02-18 21:55:32 +00:00
|
|
|
|
for l in subscriptions:
|
|
|
|
|
fetch_list(l,validity=refresh_time,depth=depth,tourchildren=True)
|
|
|
|
|
#Then the fetch list (item are removed from the list after fetch)
|
2022-03-04 11:38:41 +00:00
|
|
|
|
# We fetch regarless of the refresh_time
|
2022-02-18 21:55:32 +00:00
|
|
|
|
if "to_fetch" in lists:
|
2022-03-04 11:38:41 +00:00
|
|
|
|
nowtime = int(time.time())
|
|
|
|
|
short_valid = nowtime - starttime
|
|
|
|
|
fetch_list("to_fetch",validity=short_valid,depth=depth,tourandremove=True)
|
2022-02-18 21:55:32 +00:00
|
|
|
|
#then we fetch all the rest (including bookmarks and tour)
|
|
|
|
|
for l in normal_lists:
|
|
|
|
|
fetch_list(l,validity=refresh_time,depth=depth)
|
|
|
|
|
for l in fridge:
|
|
|
|
|
fetch_list(l,validity=0,depth=depth)
|
|
|
|
|
#tour should be the last one as item my be added to it by others
|
|
|
|
|
fetch_list("tour",validity=refresh_time,depth=depth)
|
|
|
|
|
print("End of sync")
|
|
|
|
|
self.sync_only = False
|
|
|
|
|
|
2019-06-22 12:58:21 +00:00
|
|
|
|
### The end!
|
|
|
|
|
def do_quit(self, *args):
    """Exit Offpunk.

    Cleans up the opener cache (temporary files) before terminating
    the process via SystemExit.
    """
    # Let the opener cache delete its temporary artifacts first.
    self.opencache.cleanup()
    print("You can close your screen!")
    sys.exit()

# "exit" is accepted as a synonym of "quit".
do_exit = do_quit
|
|
|
|
|
|
2022-02-18 21:55:32 +00:00
|
|
|
|
|
|
|
|
|
|
2019-06-22 12:58:21 +00:00
|
|
|
|
# Main function
|
|
|
|
|
def main():
    """Entry point: parse command-line arguments and run the client.

    Depending on the flags, this either runs one-shot maintenance modes
    (--version, --features, --fetch-later, --sync) or starts the endless
    interactive command loop.
    """
    # Parse args
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--bookmarks', action='store_true',
                        help='start with your list of bookmarks')
    parser.add_argument('--config-file',metavar='FILE',
                        help='use this particular config file instead of default')
    parser.add_argument('--sync', action='store_true',
                        help='run non-interactively to build cache by exploring lists passed '
                             'as argument. Without argument, all lists are fetched.')
    parser.add_argument('--assume-yes', action='store_true',
                        help='assume-yes when asked questions about certificates/redirections during sync (lower security)')
    parser.add_argument('--disable-http',action='store_true',
                        help='do not try to get http(s) links (but already cached will be displayed)')
    parser.add_argument('--fetch-later', action='store_true',
                        help='run non-interactively with an URL as argument to fetch it later')
    parser.add_argument('--depth',
                        help='depth of the cache to build. Default is 1. More is crazy. Use at your own risks!')
    parser.add_argument('--images-mode',
                        help='the mode to use to choose which images to download in a HTML page. '
                             'one of (None, readable, full). Warning: full will slowdown your sync.')
    parser.add_argument('--cache-validity',
                        help='duration for which a cache is valid before sync (seconds)')
    parser.add_argument('--version', action='store_true',
                        help='display version information and quit')
    parser.add_argument('--features', action='store_true',
                        help='display available features and dependancies then quit')
    parser.add_argument('url', metavar='URL', nargs='*',
                        help='Arguments should be URL to be fetched or, if --sync is used, lists')
    args = parser.parse_args()

    # Handle --version / --features: print and exit without starting a client.
    if args.version:
        print("Offpunk " + __version__)
        sys.exit()
    elif args.features:
        GeminiClient.do_version(None,None)
        sys.exit()
    else:
        # Normal startup: make sure the XDG config/data directories exist.
        for f in [xdg("config"), xdg("data")]:
            if not os.path.exists(f):
                print("Creating config directory {}".format(f))
                os.makedirs(f)

    # Instantiate client
    gc = GeminiClient(synconly=args.sync)
    torun_queue = []

    # Read the rc file into a command queue.
    # Interactive if offpunk started normally, False if started with --sync.
    # Queue is a list of commands (potentially empty).
    def read_config(queue,rcfile=None,interactive=True):
        if not rcfile:
            rcfile = os.path.join(xdg("config"), "offpunkrc")
        if os.path.exists(rcfile):
            print("Using config %s" % rcfile)
            with open(rcfile, "r") as fp:
                for line in fp:
                    line = line.strip()
                    # Navigation commands from the rc file are skipped when the
                    # user already asked for bookmarks or gave URLs on the CLI.
                    if ((args.bookmarks or args.url) and
                        any((line.startswith(x) for x in ("go", "g", "tour", "t")))
                       ):
                        if args.bookmarks:
                            print("Skipping rc command \"%s\" due to --bookmarks option." % line)
                        else:
                            print("Skipping rc command \"%s\" due to provided URLs." % line)
                        continue
                    # We always consider redirect;
                    # for the rest, we need to be interactive.
                    if line.startswith("redirect") or interactive:
                        queue.append(line)
        return queue

    # Act on args
    if args.bookmarks:
        torun_queue.append("bookmarks")
    elif args.url and not args.sync:
        # A single URL becomes "go URL"; several become a tour.
        if len(args.url) == 1:
            torun_queue.append("go %s" % args.url[0])
        else:
            for url in args.url:
                torun_queue.append("tour %s" % url)
            torun_queue.append("tour")

    if args.disable_http:
        gc.support_http = False

    # Endless interpret loop (except while --sync or --fetch-later)
    if args.fetch_later:
        if args.url:
            gc.sync_only = True
            for u in args.url:
                if looks_like_url(u):
                    # Already-cached URLs go straight to the tour;
                    # others are queued for the next sync.
                    if netcache.is_cache_valid(u):
                        gc.list_add_line("tour",u)
                    else:
                        gc.list_add_line("to_fetch",u)
                else:
                    print("%s is not a valid URL to fetch"%u)
        else:
            print("--fetch-later requires an URL (or a list of URLS) as argument")
    elif args.sync:
        if args.assume_yes:
            gc.automatic_choice = "y"
            gc.onecmd("set accept_bad_ssl_certificates True")
        if args.cache_validity:
            refresh_time = int(args.cache_validity)
        else:
            # if no refresh time, a default of 0 is used (which means "infinite")
            refresh_time = 0
        if args.images_mode and args.images_mode in ["none","readable","normal","full"]:
            gc.options["images_mode"] = args.images_mode
        if args.depth:
            depth = int(args.depth)
        else:
            depth = 1
        # Only "redirect" lines of the rc file apply in non-interactive mode.
        read_config(torun_queue,rcfile=args.config_file,interactive=False)
        for line in torun_queue:
            gc.onecmd(line)
        # Positional arguments are interpreted as the lists to synchronize
        # (empty means "all lists").
        gc.call_sync(refresh_time=refresh_time,depth=depth,lists=args.url)
    else:
        # We are in the normal mode. First process config file
        torun_queue = read_config(torun_queue,rcfile=args.config_file,interactive=True)
        print("Welcome to Offpunk!")
        print("Type `help` to get the list of available command.")
        for line in torun_queue:
            gc.onecmd(line)

        # Ctrl-C only interrupts the current command, not the whole client.
        while True:
            try:
                gc.cmdloop()
            except KeyboardInterrupt:
                print("")
|
|
|
# Run the client only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
|