updated stuff
parent 0f4285475f
commit a3cdd0294c

@@ -1,4 +1,4 @@
-Xft.dpi: 128
+Xft.dpi: 160
 ! 96 = 1080p | 128 = 1440p

 Xft.antialias: true
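One note on applying the Xft.dpi change: X clients read the resource database, not the file itself, so the edit takes effect only after a reload (a minimal sketch; the path matches the one the dpi script later in this commit edits, and already-running apps pick the value up when restarted):

    xrdb -merge ~/.Xresources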
@@ -1,6 +1,6 @@
 [global]
 frame_width = 1
-frame_color = "#FC7056"
+frame_color = "#E57B81"

 font = Noto Sans 10

@@ -181,18 +181,18 @@
 [urgency_low]
 # IMPORTANT: colors have to be defined in quotation marks.
 # Otherwise the "#" and following would be interpreted as a comment.
-background = "#120512"
-foreground = "#FC7056"
+background = "#060C13"
+foreground = "#E57B81"
 timeout = 10

 [urgency_normal]
-background = "#120512"
-foreground = "#FC7056"
+background = "#060C13"
+foreground = "#E57B81"
 timeout = 10

 [urgency_critical]
-background = "#120512"
-foreground = "#FC7056"
+background = "#060C13"
+foreground = "#E57B81"
 timeout = 0

@@ -252,8 +252,8 @@
 # appname = claws-mail
 # category = email.arrived
 # urgency = normal
-# background = "#120512"
-# foreground = "#FC7056"
+# background = "#060C13"
+# foreground = "#E57B81"
 #
 #[mute.sh]
 # appname = mute
@@ -263,40 +263,40 @@
 #[JDownloader]
 # appname = JDownloader
 # category = JD
-# background = "#120512"
-# foreground = "#FC7056"
+# background = "#060C13"
+# foreground = "#E57B81"
 #
 #[newsbeuter]
 # summary = *Feeds*
-# background = "#120512"
-# foreground = "#FC7056"
+# background = "#060C13"
+# foreground = "#E57B81"
 #
 [irc]
 appname = weechat
 timeout = 0
-background = "#120512"
-foreground = "#FC7056"
+background = "#060C13"
+foreground = "#E57B81"
 #
 [weechat hl]
 appname = weechat
 category = weechat.HL
-background = "#120512"
-foreground = "#FC7056"
+background = "#060C13"
+foreground = "#E57B81"
 #
 [weechat pn]
 appname = weechat
 category = weechat.PM
-background = "#120512"
-foreground = "#FC7056"
+background = "#060C13"
+foreground = "#E57B81"
 #
 #[CMUS]
 # appname = CMUS
 # category = cmus
-# background = "#120512"
-# foreground = "#FC7056"
+# background = "#060C13"
+# foreground = "#E57B81"
 #
 #
-# background = "#120512"
-# foreground = "#FC7056"
+# background = "#060C13"
+# foreground = "#E57B81"
 #
 # vim: ft=cfg

@@ -441,8 +441,7 @@ exec pkill picom
 exec picom -b -f

 # --> Music
-exec pkill mpd && mpd ~/.config/mpd/mpd.conf
-exec mpc update
+exec_always ~/.scripts/init/audio-fix.sh

 # --> Cross-platform M&K Support
 exec synergyc --no-tray --restart --name Gray 192.168.1.236:24800
@@ -460,9 +459,6 @@ bindsym $mod+Shift+h exec kitty -e sudo sh ~/.scripts/init/domain-fix.sh
 bindsym $mod+Shift+t exec timedatectl set-ntp true && timedatectl set-ntp false

 # --> VPN Connect
-exec piactl background enable
-exec piactl connect
+#exec piactl background enable
+#exec piactl connect

-# Unused
-########################################################
-
@@ -10,7 +10,7 @@
 # be disabled and audio files will only be accepted over ipc socket (using
 # file:// protocol) or streaming files over an accepted protocol.
 #
-music_directory "/run/media/cdnutter/Windows\ 10/Users/Gray/Music"
+music_directory "/home/cdnutter/Storage/Gray/Windows_10/Users/Gray/Music"
 #
 # This setting sets the MPD internal playlist directory. The purpose of this
 # directory is storage for playlists created by MPD. The server will use
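A likely root cause for the old music_directory line: the value is already quoted, and mpd.conf does not unescape backslashes inside quoted values, so `Windows\ 10` would be looked up with a literal backslash in the path (hedged; the line below is the hypothetical space-preserving fix under that reading, while the commit instead moves to an underscore-free path on a different mount):

    music_directory "/run/media/cdnutter/Windows 10/Users/Gray/Music"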
@@ -63,7 +63,7 @@ bottom = true
 fixed-center = true
 line-size = 0

-dpi = 128
+dpi = 160

 background = ${color.bg}
 foreground = ${color.fg}
.dmenurc (5 changes)
@@ -3,7 +3,8 @@
 #

 ## define the font for dmenu to be used
-DMENU_FN="Noto-10.5"
+#DMENU_FN="Noto-10.5"
+DMENU_FN="Fantasque Sans Mono-12.0"

 ## background colour for unselected menu-items
 DMENU_NB="#222D31"
@@ -18,7 +19,7 @@ DMENU_SB="#16A085"
 DMENU_SF="#F9FAF9"

 ## command for the terminal application to be used:
-TERMINAL_CMD="terminal -e"
+TERMINAL_CMD="kitty -e"

 ## export our variables
 DMENU_OPTIONS="-fn $DMENU_FN -nb $DMENU_NB -nf $DMENU_NF -sf $DMENU_SF -sb $DMENU_SB"
@@ -24,6 +24,7 @@ https://www.youtube.com/feeds/videos.xml?channel_id=UCJr2kZImi7_gGkeJ3avS_Wg "Yo
 -------------------------------------------

 # Reddit
+https://www.reddit.com/r/gamesale.rss "Reddit" "~/r/GameSale"
 https://www.reddit.com/r/jailbreak.rss "Reddit" "~/r/jailbreak"
 https://www.reddit.com/r/unixporn.rss "Reddit" "~/r/unixporn"
 https://www.reddit.com/r/sffpc.rss "Reddit" "~/r/sffpc"
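For context on the field layout in this urls file (hedged from newsbeuter's urls-file conventions): after the feed URL, each quoted string is a tag, and a tag beginning with `~` sets the feed's display title, so the added entry shows up as r/GameSale under the Reddit tag. A hypothetical additional feed would follow the same shape:

    https://www.reddit.com/r/commandline.rss "Reddit" "~/r/commandline"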
@@ -0,0 +1,23 @@
+#!/usr/bin/env zsh
+
+# support for Java, CPP, C, zsh/sh/bash, Python
+# more will be added when I use more file types
+# requires: enscript, ps2pdf (from ghostscript)
+
+function find_extension(){
+    header="$(head -n1 "$1")"
+    [[ "$(grep "env" "$1")" ]] && file_extension="$(echo "$header" | awk '{print $2}')"
+    [[ "$(grep "\#\!\/bin" "$1")" ]] && file_extension="$(basename "$1")"
+    [[ "$file_extension" == "zsh" ]] && file_extension="bash"
+}
+
+for file in "$@"; do
+    [[ ! -f "$file" ]] && break
+    filename="${file:t:r}"
+    file_extension="${file:t:e}"
+    [[ -z "$file_extension" ]] && find_extension "$file"
+    [[ "$file_extension" == "py" ]] && file_extension="python"
+
+    enscript -rG --word-wrap --line-numbers -p - --highlight="$file_extension" --color=1 -f Courier8 -c "$file" \
+        | ps2pdf - "$filename".pdf
+done
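A usage sketch for the new enscript wrapper above (the diff does not show its filename or location; print-code.zsh under ~/.scripts is assumed here). Each argument becomes a syntax-highlighted, line-numbered PDF in the current directory:

    chmod +x ~/.scripts/print-code.zsh
    ~/.scripts/print-code.zsh grab_pdfs.py audio-fix.sh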
@@ -0,0 +1,6 @@
+2020-11-07 11:23:15,190 ------------------------------------------------------------------------------------------
+2020-11-07 11:23:15,190
+2020-11-07 11:23:15,190
+2020-11-07 11:23:15,190 Starting
+2020-11-07 11:23:15,190 base_url:
+2020-11-07 11:23:15,190 base_dir:
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+"""
+Download all the pdfs linked on a given webpage
+
+Usage -
+
+python grab_pdfs.py url <path/to/directory>
+url is required
+path is optional. Path needs to be absolute
+will save in the current directory if no path is given
+will save in the current directory if given path does not exist
+
+Requires - requests >= 1.0.4
+           beautifulsoup >= 4.0.0
+
+Download and install using
+
+pip install requests
+pip install beautifulsoup4
+"""
+
+__author__ = 'elssar <elssar@altrawcode.com>'
+__license__ = 'MIT'
+__version__ = '1.0.0'
+
+from requests import get
+from urllib.parse import urljoin
+from os import path, getcwd
+from bs4 import BeautifulSoup as soup
+from sys import argv
+
+def get_page(base_url):
+    req = get(base_url)
+    if req.status_code == 200:
+        return req.text
+    raise Exception('Error {0}'.format(req.status_code))
+
+def get_all_links(html):
+    bs = soup(html, 'html.parser')
+    links = bs.findAll('a')
+    return links
+
+def get_pdf(base_url, base_dir):
+    html = get_page(base_url)
+    links = get_all_links(html)
+    if len(links) == 0:
+        raise Exception('No links found on the webpage')
+    n_pdfs = 0
+    for link in links:
+        if link['href'][-4:] == '.pdf':
+            n_pdfs += 1
+            content = get(urljoin(base_url, link['href']))
+            if content.status_code == 200 and content.headers['content-type'] == 'application/pdf':
+                with open(path.join(base_dir, link.text + '.pdf'), 'wb') as pdf:
+                    pdf.write(content.content)
+    if n_pdfs == 0:
+        raise Exception('No pdfs found on the page')
+    print(f"{n_pdfs} pdfs downloaded and saved in {base_dir}")
+
+if __name__ == '__main__':
+    if len(argv) not in (2, 3):
+        print('Error! Invalid arguments')
+        print(__doc__)
+        exit(-1)
+    arg = ''
+    url = argv[1]
+    if len(argv) == 3:
+        arg = argv[2]
+    base_dir = [getcwd(), arg][path.isdir(arg)]
+    try:
+        get_pdf(url, base_dir)
+    except Exception as e:
+        print(e)
+        exit(-1)
@@ -0,0 +1,75 @@
+#!/usr/bin/env python
+
+"""
+Download all the pdfs linked on a given webpage
+
+Usage -
+
+python grab_pdfs.py url <path/to/directory>
+url is required
+path is optional. Path needs to be absolute
+will save in the current directory if no path is given
+will save in the current directory if given path does not exist
+
+Requires - requests >= 1.0.4
+           beautifulsoup >= 4.0.0
+
+Download and install using
+
+pip install requests
+pip install beautifulsoup4
+"""
+
+__author__= 'elssar <elssar@altrawcode.com>'
+__license__= 'MIT'
+__version__= '1.0.0'
+
+from requests import get
+from urlparse import urljoin
+from os import path, getcwd
+from bs4 import BeautifulSoup as soup
+from sys import argv
+
+def get_page(base_url):
+    req= get(base_url)
+    if req.status_code==200:
+        return req.text
+    raise Exception('Error {0}'.format(req.status_code))
+
+def get_all_links(html):
+    bs= soup(html)
+    links= bs.findAll('a')
+    return links
+
+def get_pdf(base_url, base_dir):
+    html= get_page()
+    links= get_all_links(html)
+    if len(links)==0:
+        raise Exception('No links found on the webpage')
+    n_pdfs= 0
+    for link in links:
+        if link['href'][-4:]=='.pdf':
+            n_pdfs+= 1
+            content= get(urljoin(base_url, link['href']))
+            if content.status==200 and content.headers['content-type']=='application/pdf':
+                with open(path.join(base_dir, link.text+'.pdf'), 'wb') as pdf:
+                    pdf.write(content.content)
+    if n_pdfs==0:
+        raise Exception('No pdfs found on the page')
+    print "{0} pdfs downloaded and saved in {1}".format(n_pdfs, base_dir)
+
+if __name__=='__main__':
+    if len(argv) not in (2, 3):
+        print 'Error! Invalid arguments'
+        print __doc__
+        exit(-1)
+    arg= ''
+    url= argv[1]
+    if len(argv)==3:
+        arg= argv[2]
+    base_dir= [getcwd(), arg][path.isdir(arg)]
+    try:
+        get_pdf(base_dir)
+    except Exception, e:
+        print e
+        exit(-1)
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+
+"""
+Download all the pdfs linked on a given webpage
+Usage -
+python grab_pdfs.py url <path/to/directory>
+url is required
+path is optional. Path needs to be absolute
+will save in the current directory if no path is given
+will save in the current directory if given path does not exist
+Requires - requests >= 1.0.4
+           beautifulsoup >= 4.0.0
+Download and install using
+
+pip install requests
+pip install beautifulsoup4
+"""
+
+__author__ = 'elssar <elssar@altrawcode.com>'
+__license__ = 'MIT'
+__version__ = '1.0.0'
+
+from requests import get
+from urllib.parse import urljoin
+from os import path, getcwd
+from bs4 import BeautifulSoup as soup
+import sys
+import logging
+
+# if __name__ == '__main__':
+#     # if len(argv) not in (2, 3):
+#     if len(sys.argv)!=2:
+#         print("This is the name of the script: ", sys.argv[0])
+#         print("Number of arguments: ", len(sys.argv))
+#         print("The arguments are: ", str(sys.argv))
+#         print('Error! Invalid arguments')
+#         print(__doc__)
+#         exit(-1)
+#     arg = ''
+#     url = sys.argv[1]
+#     if len(sys.argv) == 3:
+#         arg = sys.argv[2]
+#     base_dir = [getcwd(), arg][path.isdir(arg)]
+#     try:
+#         get_pdf(base_dir)
+#     except Exception as e:
+#         print(e)
+#         exit(-1)
+
+def get_page(base_url):
+    req = get(base_url)
+    if req.status_code == 200:
+        return req.text
+    logging.warning('http status_code: ' + str(req.status_code))
+    raise Exception('Error {0}'.format(req.status_code))
+
+def get_all_links(html):
+    bs = soup(html, 'html.parser')  # MISSING 'html.parser'
+    links = bs.findAll('a')
+    return links
+
+def get_pdf(base_url, base_dir):
+    logging.info('------------------------------------------------------------------------------------------')
+    logging.info('')
+    logging.info('')
+    logging.info('Starting')
+    logging.info('base_url: ' + base_url)
+    logging.info('base_dir: ' + base_dir)
+
+    html = get_page(base_url)  # MISSING ARGUMENT
+    links = get_all_links(html)
+    if len(links) == 0:
+        logging.warning('No links found on the webpage.')
+        raise Exception('No links found on the webpage')
+
+    n_pdfs = 0
+    n_saved_pdfs = 0
+
+    for link in links:
+        current_link = link.get('href')  # This line and the line below
+        # if link['href'][-4:] == '.pdf':
+        if current_link.endswith('pdf'):
+            weblink = urljoin(base_url, link['href'])
+            logging.info('pdf file found at ' + weblink)
+            print('pdf file found:', weblink)
+
+            n_pdfs += 1
+
+            file_address = path.join(base_dir, str(current_link).split('/')[-1])  # It is not necessary to add .pdf to the end of the filename
+
+            # print('base_dir', base_dir)
+            # print('file_address', file_address)
+            # print(path.exists(file_address))
+
+            if path.exists(file_address) == False:
+                content = get(weblink, stream=True)  # https://stackoverflow.com/a/44299915/2449724
+                # stream=True means when the function returns, only the response header is downloaded; the response body is not.
+
+                if content.status_code == 200 and content.headers['content-type'] == 'application/pdf':  # status to status_code
+                    print('File size(mb)', round(float(content.headers['Content-length']) / 1000000, 2), sep=',')
+                    with open(file_address, 'wb') as pdf:
+                        logging.info('Saving pdf to ' + file_address)
+                        print('Saving pdf to', file_address)
+
+                        pdf.write(content.content)
+
+                    logging.info('COMPLETE')
+                    print('COMPLETE')
+
+                    n_saved_pdfs += 1
+                    logging.info('Number of saved pdfs is ' + str(n_saved_pdfs))
+                    print()
+
+                else:
+                    logging.info('content.status_code: ' + str(content.status_code))
+                    logging.info('''content.headers['content-type']:''' + content.headers['content-type'])
+                    print('content.status_code:', content.status_code)
+                    print('''content.headers['content-type']:''', content.headers['content-type'])
+                    print()
+
+            else:
+                logging.info('Already saved.')
+                print('Already saved')
+                n_saved_pdfs += 1
+                print()
+
+    if n_pdfs == 0:
+        logging.info('No pdfs found on the page.')
+        raise Exception('No pdfs found on the page')
+
+    logging.info("{0} pdfs found, {1} saved in {2}".format(n_pdfs, n_saved_pdfs, base_dir))
+    print("{0} pdfs found, {1} saved in {2}".format(n_pdfs, n_saved_pdfs, base_dir))
+
+base_url = ''
+base_dir = ''  # example r'C:\User'
+
+if not base_url or not base_dir:  # inverted from 'if base_dir or base_url' so the reminder fires when they are unset
+    print('Please, assign values to base_url and base_dir.')
+    exit(-1)
+
+logging.basicConfig(filename=path.join(base_dir, 'downloads_log.log'), level=logging.INFO,
+                    format='%(asctime)s %(message)s')
+
+get_pdf(base_url, base_dir)
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+
+"""
+Download all the pdfs linked on a given webpage
+
+Usage -
+
+python grab_pdfs.py url <path/to/directory>
+url is required
+path is optional. Path needs to be absolute
+will save in the current directory if no path is given
+will save in the current directory if given path does not exist
+
+Requires - requests >= 1.0.4
+           beautifulsoup >= 4.0.0
+
+Download and install using
+
+pip install requests
+pip install beautifulsoup4
+"""
+
+__author__= 'elssar <elssar@altrawcode.com>'
+__license__= 'MIT'
+__version__= '1.0.0'
+
+from requests import get
+from urllib.parse import urljoin
+from os import path, getcwd
+from bs4 import BeautifulSoup as soup
+from sys import argv
+
+features="html.parser"
+
+def get_page(base_url):
+    req= get(base_url)
+    if req.status_code==200:
+        return req.text
+    raise Exception('Error {0}'.format(req.status_code))
+
+def get_all_links(html):
+    bs= soup(html, features)
+    links= bs.findAll('a')
+    return links
+
+def get_pdf(base_url, base_dir):
+    html= get_page(base_url)
+    links= get_all_links(html)
+    if len(links)==0:
+        raise Exception('No links found on the webpage')
+    n_pdfs= 0
+    for link in links:
+        if link['href'][-4:]=='.pdf':
+            n_pdfs+= 1
+            content= get(urljoin(base_url, link['href']))
+            if content.status_code==200 and content.headers['content-type']=='application/pdf':
+                with open(path.join(base_dir, link.text+'.pdf'), 'wb') as pdf:
+                    pdf.write(content.content)
+    if n_pdfs==0:
+        raise Exception('No pdfs found on the page')
+    print(f"{n_pdfs} pdfs downloaded and saved in {base_dir}")
+
+if __name__=='__main__':
+    if len(argv) not in (2, 3):
+        print('Error! Invalid arguments')
+        print(__doc__)
+        exit(-1)
+    arg= ''
+    url= argv[1]
+    if len(argv)==3:
+        arg= argv[2]
+    base_dir= [getcwd(), arg][path.isdir(arg)]
+    try:
+        print("[-] There might be an error here")
+        print(url, base_dir)
+        get_pdf(base_url=url, base_dir=base_dir)
+    except Exception as e:
+        print(e)
+        exit(-1)
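A quick usage sketch for this last version (the URL and target directory are hypothetical; per the docstring the path must be absolute, and the script falls back to the current directory when the path is missing or not a directory):

    pip install requests beautifulsoup4
    python grab_pdfs.py https://example.com/papers /home/cdnutter/pdfs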
@@ -0,0 +1,10 @@
+#!/usr/bin/env zsh
+
+pulseaudio -k
+mpd --kill
+pkill pulseaudio
+pkill mpd
+
+mpd ~/.config/mpd/mpd.conf
+mpc update
+pulseaudio
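For reference, this is the script the i3 hunk earlier in the commit now wires in, replacing the separate pkill/mpd/mpc exec lines, so the whole PulseAudio and mpd reset runs on every i3 start and restart:

    exec_always ~/.scripts/init/audio-fix.sh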
@@ -7,7 +7,6 @@ EDITOR=vim
 #echo "Make sure you are 100% the connection you have is proper or else it'll assume outgoing everytime."
 #echo "Ctrl-C or Ctrl-D to cancel now. Otherwise Press any key to continue..."
 #read -p ""
-#if curl ipinfo.io/ip | grep -A 2 --quiet -oh "107.221." # UPDATE: IT DIDN'T WORK! MY METHOD PREVAILS!...Switched ip websites, this one should work the same just curl is cleaner.

 #if wget http://ipecho.net/plain -O - -q | grep -A 2 --quiet -oh "107.221." # Add more to the public to make it more accurate however this should be fine.
 if ifconfig | grep -oh --quiet "192.168.1.127" # Decided to switch to internal ip instead (the odds of this being a problem are like 1/1,000,000.
@@ -4,36 +4,67 @@ EDITOR=vim

 ##############################

+# 1080p
 low_dpi() {
     sed '1 s/128/96/g' ~/.Xresources > /tmp/xres
+    sed '1 s/160/96/g' ~/.Xresources > /tmp/xres
+
+    sed '66 s/128/96/g' ~/.config/polybar/config.ini > /tmp/polyconf
+    sed '66 s/160/96/g' ~/.config/polybar/config.ini > /tmp/polyconf
+
+    mv /tmp/polyconf ~/.config/polybar/config.ini
     mv /tmp/xres ~/.Xresources
 }

-hi_dpi() {
+# 1440p
+mid_dpi() {
     sed '1 s/96/128/g' ~/.Xresources > /tmp/xres
+    sed '1 s/160/128/g' ~/.Xresources > /tmp/xres
+
+    sed '66 s/96/128/g' ~/.config/polybar/config.ini > /tmp/polyconf
+    sed '66 s/160/128/g' ~/.config/polybar/config.ini > /tmp/polyconf
+
+    mv /tmp/polyconf ~/.config/polybar/config.ini
     mv /tmp/xres ~/.Xresources
+
 }

+# 2160p
+high_dpi() {
+    sed '1 s/96/160/g' ~/.Xresources > /tmp/xres
+    sed '1 s/128/160/g' ~/.Xresources > /tmp/xres
+
+    sed '66 s/96/160/g' ~/.config/polybar/config.ini > /tmp/polyconf
+    sed '66 s/128/160/g' ~/.config/polybar/config.ini > /tmp/polyconf
+
+    mv /tmp/polyconf ~/.config/polybar/config.ini
+    mv /tmp/xres ~/.Xresources
+
+}

 ##############################

 if xdpyinfo | grep --quiet -oh 3440x1440
 then
-    hi_dpi
+    mid_dpi
     xrandr --rate 120 # ONLY FOR 21:9 DELL MONITOR
 fi

 if xdpyinfo | grep --quiet -oh 2560x1440
 then
-    hi_dpi
+    mid_dpi
     xrandr --rate 144 # ONLY FOR 16:9 ACER MONITOR
 fi

+if xdpyinfo | grep --quiet -oh 3840x2160
+then
+    high_dpi
+    #xrandr --rate 144 # ONLY FOR 16:9 4K LG MONITOR
+fi

 if xdpyinfo | grep --quiet -oh 1080
 then
     low_dpi
 fi


 #############################
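The `1 s/…/…/g` and `66 s/…/…/g` forms above are sed line addresses: each substitution is restricted to line 1 of ~/.Xresources (the Xft.dpi line) and line 66 of the polybar config (the dpi = key changed earlier in this commit), so the same numbers elsewhere in the files are left alone. A quick way to confirm those addresses still point at the right keys after other edits:

    sed -n '1p' ~/.Xresources
    sed -n '66p' ~/.config/polybar/config.ini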
.zshrc (22 changes)
@@ -105,24 +105,32 @@ source $ZSH/oh-my-zsh.sh
 alias icat="kitty +kitten icat"
 alias please="sudo"

-alias sl="sh ~/.scripts/loop-sl.sh"
 alias git-out="sh ~/.scripts/git/out.sh"
 alias git-in="sh ~/.scripts/git/in.sh"

-alias mount-purple="sh ~/.scripts/mount/purple.sh"
-alias mount-blue="sh ~/.scripts/mount/blue.sh"
-alias mount-\*="sh ~/.scripts/mount/blue.sh && sudo ~/.scripts/mount/purple.sh"
-
 alias neo="sh ~/.scripts/fun/neofetch.sh"
 alias for="sh ~/.scripts/fun/fortune.sh"

+alias ccat="clear && cat"
+
+alias skip-integ="export MAKEPKG='makepkg --skipinteg'"
+
 ## wal

 (cat ~/.cache/wal/sequences &)
 cat ~/.cache/wal/sequences

-## Zathura-Pywal
-export PATH="/home/cdnutter/.local/bin:$PATH"
+## DEVKIT
+export DEVKITPRO=/opt/devkitpro
+export DEVKITARM=/opt/devkitpro/devkitARM
+export DEVKITPPC=/opt/devkitpro/devkitPPC

 ## PATH
+export PATH="/home/cdnutter/.local/bin:$PATH"
+export PATH="/home/cdnutter/.programs:$PATH"
+export PATH="${DEVKITARM}/bin/:$PATH"

 PATH=$PATH$(find "$HOME/.scripts" -type d -not -path '/.' -printf ":%p")
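The find line at the end builds one colon-joined string of every directory under ~/.scripts (`-printf ":%p"` prints each hit prefixed with a colon) and appends it to PATH, so a script in any subfolder is callable by name. To preview what gets appended (note that, as written, the `-not -path '/.'` filter matches nothing; excluding hidden directories would need a pattern like '*/.*', left here as in the source):

    find "$HOME/.scripts" -type d -not -path '/.' -printf ":%p"; echo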
packages.txt (486 changes)
File diff suppressed because it is too large