#!/bin/bash
# retro — thin CLI wrapper that forwards subcommands to retro.py.
#
# NOTE: bash resolves function names at call time and executes a script
# top-down, so the functions MUST be defined before the dispatch `case`
# below runs.  (The original had the case first, so every command failed
# with "command not found".)

# Print usage information.
help() {
    echo -e "usage: retro <command> [arguments]\n\n"
    echo -e "\th | help\tdisplay usage\n"
    echo -e "\ti | install\tinstall [packages]\n"
    echo -e "\tr | remove\tremove [packages]\n"
    echo -e "\ts | search\tsearch [packages]\n"
}

# Each command forwards its remaining arguments to retro.py and exits
# with retro.py's exit status.
install() {
    python retro.py install "$@"
    exit $?
}

remove() {
    python retro.py remove "$@"
    exit $?
}

search() {
    python retro.py search "$@"
    exit $?
}

# Dispatch on the first argument; "$@" is quoted so arguments containing
# spaces survive the hand-off (the original's bare $@ re-split them).
case "$1" in
    h|help)
        help
        exit 0
        ;;
    i|install)
        shift
        install "$@"
        ;;
    r|remove)
        shift
        remove "$@"
        ;;
    s|search)
        shift
        search "$@"
        ;;
    *)
        echo -e "Unknown option: $1\n\n"
        help
        exit 1
        ;;
esac
|
class REPO:
    """Base class for a game repository backed by a local sqlite cache.

    Subclasses set ``baseURL`` and override :meth:`get` / :meth:`search`
    as needed.  Optional per-repository configuration is read from a
    ``repos.toml`` file in the working directory.
    """

    def __init__(self, name):
        # Repository identifier; also names the on-disk cache database.
        self.name = name
        # sqlite3 connection to "<name>.db", or None if it could not be
        # opened (see cache()).
        self.db = self.cache(f"{self.name}.db")

        # Default so subclasses can read self.platforms unconditionally;
        # the original only set it when repos.toml provided it.
        self.platforms = None

        # Best-effort config load: a missing file, a missing section, or a
        # Python < 3.11 without tomllib are all non-fatal.
        try:
            from tomllib import load
            with open('repos.toml', 'rb') as f:
                repo = load(f).get(name)
            # repo is None when the section is absent; guard before `in`.
            if repo and "platforms" in repo:
                self.platforms = repo.get('platforms')
        except (ImportError, OSError, ValueError):
            pass

    def cache(self, db):
        """Open (creating if necessary) the sqlite database *db*.

        Returns the connection, or None on failure.  (The original's bare
        except left ``cachedb`` unbound and then raised NameError.)
        """
        try:
            import sqlite3 as sql
            return sql.connect(db)
        except Exception:
            return None

    def get(self, url):
        """Fetch *url*, populating code/error/headers/redirect/data.

        On an HTTP error the HTTPError is recorded and returned; on
        success the body is parsed into self.data as JSON or HTML
        (BeautifulSoup), depending on the Content-Type header.
        """
        # Imports hoisted above first use (the original referenced
        # urlparse/Request before the try-block that imported them).
        from urllib.error import HTTPError
        from urllib.parse import urlparse
        from urllib.request import Request, urlopen

        self._url = urlparse(url)
        self._request = Request(self._url.geturl())
        # Some hosts reject the default urllib User-Agent.
        self._request.add_header('User-Agent', 'Mozilla/5.0')

        self.redirect = False
        try:
            # Was urlopen(request): NameError — the attribute is _request.
            self._response = urlopen(self._request)
        except HTTPError as e:
            self.code = e.code          # HTTPError code
            self.error = e.msg          # HTTPError message
            self.headers = e.headers    # was e.hdr — no such attribute
            self._response = e.fp       # HTTPResponse object
            return e

        self.code = self._response.status
        self.error = None
        # Was response.getheaders(): `response` was never defined.
        self.headers = dict(self._response.getheaders())

        # Flag whether we were redirected away from the requested URL.
        if self._url.geturl() != self._response.url:
            self.redirect = True

        # Missing Content-Type no longer raises KeyError.
        contentType = self.headers.get('Content-Type', '')

        if 'application/json' in contentType:
            from json import JSONDecodeError, loads
            try:
                self.data = loads(self._response.read())
            except JSONDecodeError as e:
                self.error = 'JSON decoding failed!'
                return e
        elif 'text/html' in contentType:
            try:
                from bs4 import BeautifulSoup
                self.data = BeautifulSoup(self._response.read(), 'lxml')
            except Exception:
                self.error = 'accident in the kitchen while making soup'
                return
        else:
            self.error = f"unknown response from {self._url}"
            return

    def search(self, query):
        """Search the repository for *query*.

        The base implementation is abstract; the body was truncated in the
        original source, so subclasses must provide their own.
        """
        raise NotImplementedError
|
| from util.repo import REPO
|
|
|
class OLDGAMESDOWNLOAD(REPO):
    """Scraper repository for https://oldgamesdownload.com."""

    def __init__(self):
        # Was `super.__init__(self, ...)` — super was never called, which
        # raises TypeError.
        super().__init__("oldgamesdownload")
        self.baseURL = "https://oldgamesdownload.com"
        # Fall back to the built-in platform map when repos.toml supplied
        # none.  getattr: the base class may not set the attribute at all.
        if not getattr(self, 'platforms', None):
            self.platforms = {
                'Apple': {
                    'II': '/browse/platform/apple-ii',
                    'Macintosh': '/browse/platform/mac'
                },
                'Atari': {
                    'ST': '/browse/platform/atari-st',
                    '800': '/browse/platform/atari-8-bit',
                    '2600': '/browse/platform/atari-2600',
                    '5200': '/browse/platform/atari-5200',
                    '7800': '/browse/platform/atari-7800'
                },
                'Commodore': {
                    'Amiga': '/browse/platform/amiga',
                    '64': '/browse/platform/commodore-64',
                    'Plus/4': '/browse/platform/commodore-plus-4'
                },
                'Microsoft': {
                    'DOS': '/browse/platform/dos',
                    'Win32': '/browse/platform/windows',
                    'Win16': '/browse/platform/windows-3-x'
                },
                'Sega': {
                    'Genesis': '/browse/platform/sega-genesis',
                    'Master System': '/browse/platform/sega-master-system',
                    'Saturn': '/browse/platform/sega-saturn',
                    'Dreamcast': '/browse/platform/dreamcast'
                },
                'Sony': {
                    'Playstation': '/browse/platform/playstation',
                    'Playstation 2': '/browse/platform/playstation-2'
                },
                'Other': {
                    'Arcade': '/browse/platform/arcade',
                    'ColecoVision': '/browse/platform/colecovision'
                }
            }

    def get(self, path=''):
        """Fetch *path* relative to baseURL and return the parsed page.

        *path* now defaults to '' (the site front page) so the no-argument
        call in update() works; existing callers are unaffected.
        """
        super().get(f'{self.baseURL}{path}')
        return self.data

    def getArticle(self, soup):
        """Extract the games from a listing page.

        Returns a list of dicts with keys 'Game', 'URL', and 'Img', one
        per <article> tag under <div id='main'>.
        """
        article = []
        articles = soup.find(id='main').find_all('article')
        for this in articles:
            # First <div> in the article carries the background image,
            # name, and link.
            div = this.find_all('div')[0]
            # First <a> in that div carries the link and name.
            link = div.find_all('a')[0]
            article.append({
                # Was link.text() — BeautifulSoup's .text is a property,
                # not a callable (TypeError).
                'Game': link.text,
                'URL': link['href'],
                # Image URL pulled out of `background-image: url(...)`.
                'Img': div['style'].split('(')[1][:-2]
            })
        return article

    def update(self):
        # Was self.getArtcle — typo, AttributeError at runtime.
        newgames = self.getArticle(self.get())
        # NOTE(review): REPO.update is not visible in this file — confirm
        # the base class actually provides it.
        super().update(newgames)
|
|
|
| title = "Main Repositories"
|
|
|
| [oldgamesdownload]
|
| name = "Old Games Download"
|
platforms = "ALL"
|