dot

Packages and services management
Log | Files | Refs | README

commit 7d311b2742319956f30eef79bd45df85f1c3f00d
parent 77f3c970f6f7218777ccaa75791c98bcf7a66dab
Author: josuah <mail@josuah.net>
Date:   Fri,  4 Nov 2016 14:26:19 -0400

Simpler www script.

Diffstat:
DBUILD/ag | 3---
Mbin/.config/feeds/read | 2++
Mbin/buffers | 2+-
Mbin/config-git | 2+-
Dbin/feeds | 208-------------------------------------------------------------------------------
Mbin/www | 119+++++++++++++++++++++++++++++++++++++++----------------------------------------
Mgit/projects | 12++++++++++++
Mshell/.profile | 2++
Mshell/functions | 9+++++++++
9 files changed, 86 insertions(+), 273 deletions(-)

diff --git a/BUILD/ag b/BUILD/ag @@ -1,3 +0,0 @@ -# The silver searcher - -tar=http://geoff.greer.fm/ag/releases/the_silver_searcher-0.33.0.tar.gz diff --git a/bin/.config/feeds/read b/bin/.config/feeds/read @@ -39,3 +39,5 @@ http://omicsomics.blogspot.com/2016/07/math-toys-much-enjoyed.html + + diff --git a/bin/buffers b/bin/buffers @@ -80,7 +80,7 @@ get_name() # run() { - local command="${1:-$(iomenu -c -l 256 < $CACHE)}" options path + local command="${1:-$(iomenu -l 256 < $CACHE)}" options path case "$command" in diff --git a/bin/config-git b/bin/config-git @@ -1,6 +1,6 @@ # Clone projects git repository to ~/Projects -cd +cd Projects sed 's/^[[:space:]]*#.*//; /^[[:space:]]*$/ d' "$CONFIG/git/projects" | while read recository location diff --git a/bin/feeds b/bin/feeds @@ -1,208 +0,0 @@ -# ,-- | -# |- ,--. ,--. .--| ,--- -# | |--' |--' | | `--. -# ' `--' `--' `--` ---' 2016-10-07 -#------------------------------------------------------------------------------- -# Simple RSS/ATOM feed reader -# - -FEEDS="${FEEDS-$HOME/.config/feeds}" -BROWSER="${BROWSER-firefox}" -CACHE="${CACHE-$HOME/.cache/feeds}" -HELP=" -NAME - feeds - Simple RSS/ATOM fead reader - -SYNTAX - feeds COMMAND - -DESCRIPTION - r[ead] Choose a feed to read in $BROWSER - - d[download] - Get/Update the feeds and the read/unread marks. - -ENVIRONMENT - FEEDS Path to the directory of feeds (./urls) and read (./read) list. - - BROWSER Command to open the url of the feeds. - - CACHE Path to the cache directory. - -FILES - $FEEDS/urls - List of RSS/ATOM urls, one link per line. - - $FEEDS/read - List of links already opened. - - $CACHE/* - Content of the RSS/ATOM files parsed by the rss.awk or atom.awk. -" - -awk_format_columns=' -BEGIN{ - max1=0; - max2=0; -} - -{ - l++; - - c1[l] = $1; # First column - - for (i=2; i < NF; i++) { # Middle column - c2[l] = c2[l]$(i)(i<NF?OFS:""); - } - - c3[l] = $NF; # Last column - - max1 = (length(c1[l]) > max1 ? length(c1[l]) : max1); - max2 = (length(c2[l]) > max2 ? 
length(c2[l]) : max2); -} - -END { - for (i = 1; i <= l; i++) { - printf("%-"max1"s %-"max2"s %s\n", c1[i], c2[i], c3[i]); - } -}' - - -# -# Check if feeds have been seen, and add a 'N' to the beginning of the line if -# so, and a '-' if not. Read stdin and print stdout. -# -check_read() -{ - while read line - do - read=0 - url="$(printf '%s' "$line" | awk '{ print $NF }')" - - if [ -z "$(grep -F "$url" "$FEEDS/read")" ] - then - printf '%s\n' "N $line" - else - printf '%s\n' "- $line" - fi - done -} - -# -# -# Check if the feed is RSS or ATOM, and transforms it in a list. -# -parse() -{ - local content="$1" - - if [ ! -z "$(printf '%s' "$content" | sed -n '/<rss[^>]*>/ p' )" ] - then - RS='<[/]?item[^>]*>' - begin='.*<link[^>]>' - end='<\/link>.*' - else - RS='<[/]?entry[^>]*>' - begin='.*href=\42' - end='\42.*' - fi - - printf '%s' "$content" | awk -v RS="$RS" ' -BEGIN { - max1=0; - max2=0; -} - -NR > 1 && NR < $(NR-1) { - title = $0 - sub(/.*<title[^>]*>/, "", title) - sub(/<\/title>.*/, "", title) - - url = $0 - sub(/'"$begin"'/, "", url) - sub(/'"$end"'/, "", url) - - printf("%s %s\n", title, url) -}' -} - - -# -# Download each URL from $FEEDS/urls, parse them, check for read/unread, and -# store the generated list in $FEEDS/feeds/$title. -# -download() -{ - local content title - - for feed in $(sed -r '/^(#.*)?$/ d' "$FEEDS/urls") - do - { - content="$(wget -qO - $feed)" - - title="$( - printf '%s' "$content" | - sed 's/<\/title>.*//' | - sed -n 's/.*<title[^>]*>// p' | - sed 1q | - tr ' [A-Z]' '_[a-z]' | - tr -cd '[a-z][1-9_.,-]' - )" - - printf '%s\n' "$title" - - parse "$content" | - check_read | - awk "$awk_format_columns" > "$CACHE/$title" - } & - done - - wait -} - - -# -# List and pompt the feed for displaying articles. 
-# -read_feeds() -{ - for feed in $(ls "$CACHE") - do - # count number of unread items - printf '%s #' "$feed" - sed -n '/^N / p' "$CACHE/$feed" | wc -l - - # update the read/unread marks - cut -c 3- "$CACHE/$feed" | check_read > "$CACHE/$feed" - done | - sort -r | - iomenu -s '#' | - sed 's/ *[0-9]* //' | - xargs -I {} cat "$CACHE/{}" | - iomenu | - tee -a "$FEEDS/read" | - xargs $BROWSER -} - - -main() -{ - [ ! -d "$CACHE" ] && mkdir -p "$CACHE" - touch "$FEEDS/read" - touch "$FEEDS/urls" - - case $1 in - d | download ) - download - ;; - - r | read ) - read_feeds - ;; - * ) - printf '%s\n' "$HELP" - ;; - esac -} - -main $@ diff --git a/bin/www b/bin/www @@ -1,27 +1,27 @@ # , , , , , , , , , # | | | | | | | | | -# `-'-' `-'-' `-'-' 2016-10-07 +# `-'-' `-'-' `-'-' 2016-11-04 #------------------------------------------------------------------------------- # Static website generator LAYOUT="layout.html" -NL=' -' -HELP=' + + +help() +{ + printf ' NAME www - static website generator USAGE - cd INPUT; www [-o OUTPUT] + www DESCRIPTION INPUT Source directory from which generate the website - OUTPUT Target directory for the generated website. - FILES - INPUT/layout.html + INPUT/%s File containing the pages layout INPUT/_* @@ -30,7 +30,7 @@ FILES INPUT/*/README The "README" files content can be displayed on the pages. -FILE FORMAT +LAYOUT FORMAT Within a layout.html, there is some templating features, using keywords. {{ content }} @@ -43,106 +43,105 @@ FILE FORMAT Relative path to the root of the website, for relative links. {{ title }} Page title, given by the path, with "/" replaced by " > ". -' +' "$LAYOUT" + + exit 1 +} parse() { - local item="$1" input="$2" root="$3" + local item="$1" input="$2" - # Print the input up to the next keyword, and shift position in input + # print the input up to the next keyword, and shift position in input printf %s "${input%%{{*}" - # Check for remaining keywords. 
+ # check for remaining keywords [ "${input##*{{*}" ] && return 0 - # Clean out separator from the keyword + # clean out separator from the keyword input="${input#*{{}" - # Execute the keyword and print its output. + # execute the keyword and print its output case "${input%%\}\}*}" in *content* ) - [ -f "$item/README"* ] && tee < "$item/README"* + [ -f "$item/README" ] && cat "$item/README" ;; *list* ) - [ -z "$root" ] && list "$item" + [ "$item" != '.' ] && list "$item" ;; *nav* ) - parse "$item" "$(nav)" "$root" + parse "$item" "$(nav)" ;; *title* ) - printf %s "${item%/}" | sed 's/\// > /g' + printf %s "${item%/}" | sed 's_/_ > _g' ;; *root* ) - if [ -z "$root" ] - then printf %s "$item/" | sed 's/[^/]//g; s/\//..\//g; s/\/$//' - else printf '.' + if [ "$item" = '.' ] + then + printf '.' + else + printf %s/ "$item" | sed 's_[^/]__g; s_/_../_g; s_/$__' fi ;; esac - parse "$item" "${input#*\}\}}" "$root" + parse "$item" "${input#*\}\}}" } +# +# HTML list of the current directory content. +# list() { - printf '<ul>\n' + local dir="$1" - find "$1" -mindepth 1 -maxdepth 1 ! -name 'README' | sort | - while read -r path - do - printf '<li><a href="%s">%s</a></li>\n' \ - "${path#$1/}" "${path#$1/}" - done + printf '<ul>\n' - printf '</ul>\n' + ( # for cd not to affect current directory + cd "$dir" + find . -mindepth 1 -maxdepth 1 \ + ! -name 'README' \ + ! -name 'index.html' | + sort | + sed -r 's_..(.*)_<li><a href="\1">\1</a></li>_' + ) + + printf '</ul>\n' } +# +# HTML line of relative links to first level directory +# nav() { - find . -mindepth 1 -maxdepth 1 -type d ! -path '*/.*' | sort | - while read -r dir - do - printf ' | <a href="{{ root }}/%s">%s</a>' \ - "${dir#./}" "${dir#./}" - done + find . -mindepth 1 -maxdepth 1 -type d ! -path '*/.*' | + sort | + sed -r 's_..(.*)_ | <a href="{{ root }}/\1">\1</a>_' } main() { - if [ "$1" = '-o' -a "$#" = 2 ] - then - OUT="$2" - else - printf '%s\n' "$HELP" - exit 0 - fi - - # Remove $OUT's files but not hidden files. 
- mkdir -p "$OUT" - rm -r "$OUT/"* - - # Create pages from directory structure, copy static files - find . -mindepth 1 ! -path '*/.*' ! -name 'README' -type d | - while read -r path - do - mkdir -p "$OUT/$path" + [ "$#" -gt 0 ] || [ ! -f layout.html ] && help - parse "${path#./}" "$(tee < "$LAYOUT")" \ - > "$OUT/$path/index.html" - done + # delete previously generated "index.html" files + find -name index.html -exec rm {} \; - find . ! -path '*/.*' ! -name 'README' -type f -exec cp {} "$OUT/{}" \; + # create index.html pages for each non-dotfile directory + find . -mindepth 1 ! -path '*/.*' -type d | while IFS='' read -r d + do + parse "${d#./}" "$(cat "$LAYOUT")" > "$d/index.html" + done - # Create home page - parse '.' "$(tee < "$LAYOUT")" root > "$OUT/index.html" + # create home page, without listing "style.css", "favicon.png", ... + parse '.' "$(cat "$LAYOUT")" > index.html } main $@ diff --git a/git/projects b/git/projects @@ -0,0 +1,12 @@ +http://github.com/josuah/josuah.github.io +http://github.com/josuah/Notes ~/Notes +http://github.com/josuah/iirc +http://github.com/josuah/iomenu +http://github.com/josuah/ +http://github.com/josuah/ +http://github.com/josuah/ +http://github.com/josuah/ +http://github.com/josuah/ +http://github.com/josuah/ +http://github.com/josuah/ +http://github.com/josuah/ diff --git a/shell/.profile b/shell/.profile @@ -58,3 +58,5 @@ then theme pink setfont "$CONFIG/fonts/terminus/ter-v16b.psf.gz" fi + +printf '\n' diff --git a/shell/functions b/shell/functions @@ -134,6 +134,10 @@ rotate() esac | sudo tee /sys/class/graphics/fbcon/rotate_all > /dev/null } + +# VIDEO +#------------------------------------------------------------------------------- + # # FrameBuffer Video player based on mplayer # @@ -148,3 +152,8 @@ fbv() mplayer -vm -vo fbdev2 "$video" -vf scale -zoom -x "$x" -y "$y" } +screencast() +{ + ffmpeg -video_size 1366x768 -framerate 25 -f x11grab -i :0.0 \ + ~/${1:-screencast}.mp4 +}