Check-in by ben on 2025-11-09 01:26:03
Update pharos to version 14. Modify it to run under gophernicus
rather than geomyidae.
INSERTED DELETED
2 25 clean.sh
3 3 config.m4
53 35 make.sh
0 36 pharos/about.txt
pharos/account/index.dcgi
pharos/audio/index.gph
pharos/books/index.gph
pharos/debug/index.cgi
pharos/details/index.dcgi
pharos/direct/index.dcgi
pharos/download/index.dcgi
pharos/images/index.gph
pharos/index.gph
pharos/links/index.dcgi
pharos/list/index.dcgi
pharos/lists/index.dcgi
pharos/listsort/index.dcgi
pharos/raw/index.cgi
pharos/search/index.dcgi
pharos/software/index.gph
pharos/sort/index.dcgi
pharos/text/index.cgi
pharos/video/index.gph
0 10 pharos/wizard/index.gph
pharos/wizard/step1/index.dcgi
pharos/wizard/step2/index.dcgi
pharos/wizard/step3/index.dcgi
18 15 readme.txt
130 0 src/account.m4
0 126 src/account/index.dcgi.m4
21 0 src/audio/gophermap.m4
0 21 src/audio/index.gph.m4
22 0 src/books/gophermap.m4
0 22 src/books/index.gph.m4
26 54 src/cgi.awk
1 1 src/config.awk
230 0 src/details.m4
0 230 src/details/index.dcgi.m4
103 0 src/download.m4
0 106 src/download/index.dcgi.m4
26 0 src/gophermap.m4
14 0 src/images/gophermap.m4
0 14 src/images/index.gph.m4
0 26 src/index.gph.m4
229 0 src/list.m4
0 220 src/list/index.dcgi.m4
113 0 src/lists.m4
0 111 src/lists/index.dcgi.m4
54 0 src/listsort.m4
0 54 src/listsort/index.dcgi.m4
46 0 src/raw.m4
0 46 src/raw/index.cgi.m4
261 0 src/search.m4
0 255 src/search/index.dcgi.m4
28 0 src/software/gophermap.m4
0 28 src/software/index.gph.m4
61 0 src/sort.m4
0 61 src/sort/index.dcgi.m4
3 3 src/util.awk
23 0 src/video/gophermap.m4
0 23 src/video/index.gph.m4
9 10 src/web.awk
10 0 src/wizard/gophermap
65 0 src/wizard/step1.m4
0 67 src/wizard/step1/index.dcgi.m4
51 0 src/wizard/step2.m4
0 51 src/wizard/step2/index.dcgi.m4
45 0 src/wizard/step3.m4
0 44 src/wizard/step3/index.dcgi.m4
1647 1697 TOTAL over 47 changed files
Index: clean.sh
==================================================================
--- clean.sh
+++ clean.sh
@@ -1,27 +1,4 @@
#!/bin/sh
-set -x
-cd pharos
-rm -f index.gph \
- account/index.dcgi \
- audio/index.gph \
- books/index.gph \
- images/index.gph \
- debug/index.cgi \
- details/index.dcgi \
- direct/index.dcgi \
- download/index.dcgi \
- links/index.dcgi \
- list/index.dcgi \
- lists/index.dcgi \
- raw/index.cgi \
- search/index.dcgi \
- software/index.gph \
- sort/index.dcgi \
- text/index.cgi \
- video/index.gph \
- wizard/step1/index.dcgi \
- wizard/step2/index.dcgi \
- wizard/step3/index.dcgi
-set +x
-cd -
+find cgi -type f -exec rm {} \;
+find gopher -type f -exec rm {} \;
exit 0
Index: config.m4
==================================================================
--- config.m4
+++ config.m4
@@ -1,16 +1,17 @@
dnl Set configuration variables
dnl
-define(__PHAROS_VERSION__, 13)dnl
+define(__PHAROS_VERSION__, 14)dnl
dnl
define(__AGENT__, Lynx/2.9.0dev.10 libwww-FM/2.14 SSL-MM/1.4.1 OpenSSL/1.1.1w)dnl
define(__API_ENDPOINT__,
http://archive.org)dnl
define(__API_SSL_ENDPOINT__,
https://archive.org)dnl
define(__AWK_EXT__, 0)dnl
define(__CACHE_DB__, /home/user/pharos/db/cache.dat)dnl
define(__CACHE_ENABLED__, 0)dnl
-define(__CGIPATH__, /~user/pharos)dnl
+define(__CGIPATH__, /cgi)dnl
+define(__DOCPATH__, /~user/pharos)dnl
define(__CMD_AWK__, /usr/bin/awk)dnl
define(__CMD_CURL__, /usr/bin/curl)dnl
define(__CMD_ENV__, /usr/bin/env)dnl
define(__CMD_JSON2TSV__, /usr/local/bin/json2tsv)dnl
define(__CMD_MKTEMP__, /bin/mktemp)dnl
@@ -18,11 +19,10 @@
define(__CMD_SQLITE__, /usr/bin/sqlite3)dnl
define(__CMD_STRINGS__, /bin/busybox strings)dnl
define(__CMD_WEBDUMP__, /usr/local/bin/webdump)dnl
define(__CMD_XARGS__, /usr/bin/xargs)dnl
define(__CONTACT__,
[email protected])dnl
-define(__GEOMYIDAE_VERSION__, 0.96)dnl
define(__MAX_BIN_SIZE__, 10)dnl
define(__MAX_TXT_SIZE__, 1)dnl
define(__PASS_ENABLED__, 0)dnl
define(__PASS_LIST__, /home/user/pharos/pass.tsv)dnl
define(__SERVER__, server)dnl
Index: make.sh
==================================================================
--- make.sh
+++ make.sh
@@ -1,42 +1,60 @@
#!/bin/sh
-DESTDIR=pharos
SRC=src
+DST=gopher
+CGI=cgi
+
+build_cgi() {
+ build_gophermap $1 $2 $3
+ chmod a+rx $3
+ return
+}
-build() {
+build_gophermap() {
dir=$1
in=$2
out=$3
- echo "m4 $in >$out && chmod a+rx $out"
- m4 $in >$out && chmod a+rx $out
-}
-
-# PLAIN
-for f in index.gph \
- account/index.dcgi \
- audio/index.gph \
- books/index.gph \
- images/index.gph \
- details/index.dcgi \
- download/index.dcgi \
- list/index.dcgi \
- lists/index.dcgi \
- listsort/index.dcgi \
- raw/index.cgi \
- search/index.dcgi \
- software/index.gph \
- sort/index.dcgi \
- video/index.gph \
- wizard/step1/index.dcgi \
- wizard/step2/index.dcgi \
- wizard/step3/index.dcgi
-do
- build $SRC ${SRC}/${f}.m4 ${DESTDIR}/$f
-done
-
-# WEIRD
-build $SRC ${SRC}/raw/index.cgi.m4 ${DESTDIR}/debug/index.cgi
-build $SRC ${SRC}/raw/index.cgi.m4 ${DESTDIR}/text/index.cgi
-build $SRC ${SRC}/download/index.dcgi.m4 ${DESTDIR}/direct/index.dcgi
-build $SRC ${SRC}/raw/index.cgi.m4 ${DESTDIR}/links/index.dcgi
-cp pass.tsv ${DESTDIR}/pass.tsv
+ echo "m4 $in >$out ..."
+ m4 $in >$out
+ return
+}
+
+# gophermaps
+
+for f in gophermap \
+ audio/gophermap \
+ books/gophermap \
+ images/gophermap \
+ software/gophermap \
+ video/gophermap
+do
+ build_gophermap $SRC ${SRC}/${f}.m4 ${DST}/$f
+done
+
+# CGI
+
+for f in account \
+ details \
+ download \
+ list \
+ lists \
+ listsort \
+ raw \
+ search \
+ sort \
+ wizard/step1 \
+ wizard/step2 \
+ wizard/step3
+do
+ build_cgi $SRC ${SRC}/${f}.m4 ${CGI}/$f
+done
+
+# special cases
+
+build_cgi $SRC ${SRC}/raw.m4 ${CGI}/debug
+build_cgi $SRC ${SRC}/raw.m4 ${CGI}/text
+build_cgi $SRC ${SRC}/download.m4 ${CGI}/direct
+build_cgi $SRC ${SRC}/raw.m4 ${CGI}/links
+cp ${SRC}/wizard/gophermap ${CGI}/wizard/
+
+cp pass.tsv ${DST}/pass.tsv
exit 0
DELETED pharos/about.txt
Index: pharos/about.txt
==================================================================
--- pharos/about.txt
+++ /dev/null
@@ -1,36 +0,0 @@
-# About PHAROS
-
-Proxy Internet Archive to gopher.
-
-# Search
-
-Example: To find Indian music, excluding commercial samples.
-
- description:(indian) AND -collection:samples_only AND
- mediatype:audio AND subject:music
-
-# Fields
-
-* date:
- YYYY-MM-DD or [YYYY-MM-DD TO YYYY-MM-DD]
-* mediatype:
- audio, collection, data, image, movies, software, texts, or web
-
-# Media type key for search results
-
-* [aud] = audio or etree
-* [col] = collection
-* [dat] = data
-* [img] = image
-* [mov] = movies
-* [bin] = software
-* [txt] = texts
-* [web] = web
-
-See also: <
https://archive.org/advancedsearch.php>
-
-# Limits
-
-This service does not work in the lagrange browser.
-Lagrange URI encodes the search string and also
-mangles items that are not URI encoded.
DELETED pharos/account/index.dcgi
Index: pharos/account/index.dcgi
==================================================================
--- pharos/account/index.dcgi
+++ /dev/null
DELETED pharos/audio/index.gph
Index: pharos/audio/index.gph
==================================================================
--- pharos/audio/index.gph
+++ /dev/null
DELETED pharos/books/index.gph
Index: pharos/books/index.gph
==================================================================
--- pharos/books/index.gph
+++ /dev/null
DELETED pharos/debug/index.cgi
Index: pharos/debug/index.cgi
==================================================================
--- pharos/debug/index.cgi
+++ /dev/null
DELETED pharos/details/index.dcgi
Index: pharos/details/index.dcgi
==================================================================
--- pharos/details/index.dcgi
+++ /dev/null
DELETED pharos/direct/index.dcgi
Index: pharos/direct/index.dcgi
==================================================================
--- pharos/direct/index.dcgi
+++ /dev/null
DELETED pharos/download/index.dcgi
Index: pharos/download/index.dcgi
==================================================================
--- pharos/download/index.dcgi
+++ /dev/null
DELETED pharos/images/index.gph
Index: pharos/images/index.gph
==================================================================
--- pharos/images/index.gph
+++ /dev/null
DELETED pharos/index.gph
Index: pharos/index.gph
==================================================================
--- pharos/index.gph
+++ /dev/null
DELETED pharos/links/index.dcgi
Index: pharos/links/index.dcgi
==================================================================
--- pharos/links/index.dcgi
+++ /dev/null
DELETED pharos/list/index.dcgi
Index: pharos/list/index.dcgi
==================================================================
--- pharos/list/index.dcgi
+++ /dev/null
DELETED pharos/lists/index.dcgi
Index: pharos/lists/index.dcgi
==================================================================
--- pharos/lists/index.dcgi
+++ /dev/null
DELETED pharos/listsort/index.dcgi
Index: pharos/listsort/index.dcgi
==================================================================
--- pharos/listsort/index.dcgi
+++ /dev/null
DELETED pharos/raw/index.cgi
Index: pharos/raw/index.cgi
==================================================================
--- pharos/raw/index.cgi
+++ /dev/null
DELETED pharos/search/index.dcgi
Index: pharos/search/index.dcgi
==================================================================
--- pharos/search/index.dcgi
+++ /dev/null
DELETED pharos/software/index.gph
Index: pharos/software/index.gph
==================================================================
--- pharos/software/index.gph
+++ /dev/null
DELETED pharos/sort/index.dcgi
Index: pharos/sort/index.dcgi
==================================================================
--- pharos/sort/index.dcgi
+++ /dev/null
DELETED pharos/text/index.cgi
Index: pharos/text/index.cgi
==================================================================
--- pharos/text/index.cgi
+++ /dev/null
DELETED pharos/video/index.gph
Index: pharos/video/index.gph
==================================================================
--- pharos/video/index.gph
+++ /dev/null
DELETED pharos/wizard/index.gph
Index: pharos/wizard/index.gph
==================================================================
--- pharos/wizard/index.gph
+++ /dev/null
@@ -1,10 +0,0 @@
- , _
- /| | |
- _/_\_ >_<
- .-\-/. |
- / | | \_ |
- \ \| |\__(/
- /(`---') |
- / / \ |
- _.' \'-' / |
- `----'`=-=' ' hjw
DELETED pharos/wizard/step1/index.dcgi
Index: pharos/wizard/step1/index.dcgi
==================================================================
--- pharos/wizard/step1/index.dcgi
+++ /dev/null
DELETED pharos/wizard/step2/index.dcgi
Index: pharos/wizard/step2/index.dcgi
==================================================================
--- pharos/wizard/step2/index.dcgi
+++ /dev/null
DELETED pharos/wizard/step3/index.dcgi
Index: pharos/wizard/step3/index.dcgi
==================================================================
--- pharos/wizard/step3/index.dcgi
+++ /dev/null
Index: readme.txt
==================================================================
--- readme.txt
+++ readme.txt
@@ -1,47 +1,50 @@
PHAROS by Ben Collver <
[email protected]>
=============================================
+
* Description
* Requirements
* Configuration
* Installation
Description
===========
+
Pharos is a gopher front-end to the Internet Archive written in AWK.
It is named after the light-house island near Alexandria.
-For a description and screenshots from an earlier version, see my
-post linked below.
-
-<
gopher://tilde.pink/1/~bencollver/log/
-2024-07-30-pharos-gopher-frontend-to-internet-archive/>
-
-<
https://gopher.tildeverse.org/tilde.pink/1/~bencollver/log/
-2024-07-30-pharos-gopher-frontend-to-internet-archive/>
-
Requirements
============
-* Runs under the Geomyidae gopher server.
- <
gopher://bitreich.org/1/scm/geomyidae/log.gph>
+
+* Runs under the Gophernicus gopher server.
+ <
gopher://gopher.gophernicus.org/>
* Written in AWK. Tested using busybox awk.
* Commands used:
* env, mktemp, rm, strings, xargs
* curl <
https://curl.se/>
* json2tsv <
gopher://codemadness.org/1/phlog/json2tsv/>
* webdump <
gopher://codemadness.org/1/phlog/webdump/>
Configuration
=============
+
To set configuration variables, edit config.m4
AWK_EXT allows use of non-standard features in gawk and mawk
CACHE_ENABLED caches content in sqlite to reduce API calls
Installation
============
-Installation depends on m4.
+
+Installation depends on m4. I use m4 to achieve a portable module
+system in any major awk. Gawk has @include() but that isn't
+portable. The make.sh script processes the source code and generates
+the CGI files based on the settings in config.m4.
+
+ $ sh clean.sh
+ $ sh make.sh
-$ sh clean.sh
-$ sh make.sh
+Copy cgi/* into the CGI directory.
+Copy gopher/* into a gopher directory.
-Copy pharos/ into place
+This separation between CGI and gophermaps is necessary because
+dynamic gophermaps are not binary-safe in gophernicus.
ADDED src/account.m4
Index: src/account.m4
==================================================================
--- /dev/null
+++ src/account.m4
@@ -0,0 +1,130 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/account
+#
+# Show details for an account
+
+include(src/config.awk)
+incl(src/api.awk)
+incl(src/cache.awk)
+incl(src/cgi.awk)
+incl(src/sqlite.awk)
+incl(src/util.awk)
+incl(src/web.awk)
+
+function main( acct, cmd, col, cols, count, descr, dir, email,
+ iaout, id, item_server, item_size, output, prefix, signature,
+ str, thumb, title, type, url)
+{
+ count = split(search, parts, "/")
+ acct = parts[1]
+ email = parts[2]
+
+ signature = sprintf("account/%s/%s", acct, email)
+ str = cache_init(signature)
+ if (length(str) > 0) {
+ print str
+ return
+ }
+
+ output = cache_begin()
+ iaout = gettemp()
+
+ url = api_endpoint "/metadata/" acct
+ api_request(url, "GET", iaout)
+
+ # format search results as a gopher directory (menu)
+ cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
+ FS = "\t"
+ cols = 0
+ delete col
+ descr = ""
+ dir = ""
+ id = ""
+ item_server = ""
+ item_size = 0
+ title = ""
+ thumb = ""
+ type = ""
+ while ((cmd | getline) > 0) {
+ if ($1 == ".dir" && $2 == "s") {
+ dir = $3
+ } else if ($1 == ".files[].name" && $2 == "s") {
+ if ($3 == "__ia_thumb.jpg") {
+ thumb = $3
+ }
+ } else if ($1 == ".item_size" && $2 == "n") {
+ item_size = $3
+ } else if ($1 == ".metadata.collection" && $2 == "s") {
+ cols++
+ col[cols] = $3
+ } else if ($1 == ".metadata.collection[]" && $2 == "s") {
+ cols++
+ col[cols] = $3
+ } else if ($1 == ".metadata.description" && $2 == "s") {
+ descr = $3
+ } else if ($1 == ".metadata.identifier" && $2 == "s") {
+ id = $3
+ } else if ($1 == ".metadata.mediatype" && $2 == "s") {
+ type = $3
+ } else if ($1 == ".metadata.title" && $2 == "s") {
+ title = $3
+ } else if ($1 == ".server" && $2 == "s") {
+ item_server = $3
+ }
+ }
+ close(cmd)
+
+ if (length(id) == 0) {
+ print_not_found(output, url)
+ cache_end()
+ unlink(iaout)
+ return
+ }
+
+ print "Account: " acct >>output
+ if (length(thumb) > 0) {
+ url = sprintf("http://%s%s/%s", item_server, dir, thumb)
+ printf "IThumbnail\t%s/raw?%s\t%s\t%s\n", cgipath, url,
+ server, port >>output
+ }
+ print_html(output, descr)
+
+ if (length(email) > 0) {
+ prefix = "/sortaddeddate desc"
+ printf "1Uploads\t%s/search?uploader:%s%s\t%s\t%s\n", cgipath,
+ email, prefix, server, port >>output
+ }
+
+ prefix = "/sortaddeddate desc"
+ printf "1Items\t%s/search?anyfield:%s%s\t%s\t%s\n", cgipath,
+ acct, prefix, server, port >>output
+
+ printf "1Lists\t%s/lists?%s\t%s\t%s\n", cgipath, acct, server,
+ port >>output
+
+ print "" >>output
+ printf "%-20s %s\n", "Identifier:", id >>output
+ if (item_size > 0) {
+ printf "%-20s %d\n", "Item Size:", item_size >>output
+ }
+ printf "%-20s %s\n", "Media Type:", type >>output
+
+ print "" >>output
+ printf "hAccount web page\tURL:%s/details?%s\t%s\t%s\n",
+ api_ssl_endpoint, uri_encode(id), server, port >>output
+ printf "1PHAROS\t%s\t%s\t%s\n", docpath, server, port >>output
+
+ cache_end()
+ unlink(iaout)
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ uri_encode_init()
+ main()
+}
DELETED src/account/index.dcgi.m4
Index: src/account/index.dcgi.m4
==================================================================
--- src/account/index.dcgi.m4
+++ /dev/null
@@ -1,126 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# account/index.dcgi
-#
-# Show details for an account
-
-include(src/config.awk)
-incl(src/api.awk)
-incl(src/cache.awk)
-incl(src/cgi.awk)
-incl(src/sqlite.awk)
-incl(src/util.awk)
-incl(src/web.awk)
-
-function main( acct, cmd, col, cols, descr, dir, email, iaout, id,
- item_server, item_size, output, signature, str, thumb, title,
- type, url)
-{
- acct = parts[3]
- email = search
-
- signature = sprintf("account/%s/%s", acct, email)
- str = cache_init(signature)
- if (length(str) > 0) {
- print str
- return
- }
-
- output = cache_begin()
- iaout = gettemp()
-
- url = api_endpoint "/metadata/" acct
- api_request(url, "GET", iaout)
-
- # format search results as a gopher directory (menu)
- cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
- FS = "\t"
- cols = 0
- delete col
- descr = ""
- dir = ""
- id = ""
- item_server = ""
- item_size = 0
- title = ""
- thumb = ""
- type = ""
- while ((cmd | getline) > 0) {
- if ($1 == ".dir" && $2 == "s") {
- dir = $3
- } else if ($1 == ".files[].name" && $2 == "s") {
- if ($3 == "__ia_thumb.jpg") {
- thumb = $3
- }
- } else if ($1 == ".item_size" && $2 == "n") {
- item_size = $3
- } else if ($1 == ".metadata.collection" && $2 == "s") {
- cols++
- col[cols] = $3
- } else if ($1 == ".metadata.collection[]" && $2 == "s") {
- cols++
- col[cols] = $3
- } else if ($1 == ".metadata.description" && $2 == "s") {
- descr = $3
- } else if ($1 == ".metadata.identifier" && $2 == "s") {
- id = $3
- } else if ($1 == ".metadata.mediatype" && $2 == "s") {
- type = $3
- } else if ($1 == ".metadata.title" && $2 == "s") {
- title = $3
- } else if ($1 == ".server" && $2 == "s") {
- item_server = $3
- }
- }
- close(cmd)
-
- if (length(id) == 0) {
- print_not_found(output, url)
- cache_end()
- unlink(iaout)
- return
- }
-
- print "Account: " acct >>output
- if (length(thumb) > 0) {
- url = sprintf("http://%s%s/%s", item_server, dir, thumb)
- printf "[I|Thumbnail|%s/raw/%%09%s|%s|%s]\n", cgipath, url,
- server, port >>output
- }
- print_html(output, descr)
-
- if (length(email) > 0) {
- printf "[1|Uploads|%s/search/sortaddeddate desc%%09uploader:%s|" \
- "%s|%s]\n",
- cgipath, email, server, port >>output
- }
- printf "[1|Items|%s/search/sortaddeddate desc%%09anyfield:%s|%s|%s]\n",
- cgipath, acct, server, port >>output
- printf "[1|Lists|%s/lists/%%09%s|%s|%s]\n", cgipath, acct,
- server, port >>output
-
- print "" >>output
- printf "%-20s %s\n", "Identifier:", id >>output
- if (item_size > 0) {
- printf "%-20s %d\n", "Item Size:", item_size >>output
- }
- printf "%-20s %s\n", "Media Type:", type >>output
-
- print "" >>output
- printf "[h|Account web page|URL:%s/details/%s|%s|%s]\n",
- api_ssl_endpoint, uri_encode(id), server, port >>output
- printf "[1|PHAROS|%s|%s|%s]\n", cgipath, server, port >>output
-
- cache_end()
- unlink(iaout)
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- uri_encode_init()
- main()
-}
ADDED src/audio/gophermap.m4
Index: src/audio/gophermap.m4
==================================================================
--- /dev/null
+++ src/audio/gophermap.m4
@@ -0,0 +1,21 @@
+include(config.m4)dnl
+# Audio
+
+[1|All Audio|__CGIPATH__/search/sortweek desc/%09mediatype:audio|__SERVER__|__PORT__]
+[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:audio|__SERVER__|__PORT__]
+[1|Live Music Archive|__CGIPATH__/search/sortweek desc/%09collection:etree AND mediatype:collection|__SERVER__|__PORT__]
+[1|Librivox Free Audiobooks|__CGIPATH__/search/sortweek desc/%09collection:librivoxaudio|__SERVER__|__PORT__]
+[1|Grateful Dead|__CGIPATH__/search/sortweek desc/%09collection:GratefulDead|__SERVER__|__PORT__]
+[1|Netlabels|__CGIPATH__/search/sortweek desc/%09collection:netlabels AND mediatype:collection|__SERVER__|__PORT__]
+[1|Old Time Radio|__CGIPATH__/search/sortweek desc/%09collection:oldtimeradio|__SERVER__|__PORT__]
+[1|78 RPMs & Cylinder Recordings|__CGIPATH__/search/sortweek desc/%09collection:78rpm AND mediatype:collection|__SERVER__|__PORT__]
+[1|Audio Books & Poetry|__CGIPATH__/search/sortweek desc/%09collection:audio_bookspoetry|__SERVER__|__PORT__]
+[1|Computers, Technology & Science|__CGIPATH__/search/sortweek desc/%09collection:audio_tech|__SERVER__|__PORT__]
+[1|Music, Arts & Culture|__CGIPATH__/search/sortweek desc/%09collection:audio_music AND mediatype:collection|__SERVER__|__PORT__]
+[1|News & Public Affairs|__CGIPATH__/search/sortweek desc/%09collection:audio_news|__SERVER__|__PORT__]
+[1|Spirituality & Religion|__CGIPATH__/search/sortweek desc/%09collection:audio_religion|__SERVER__|__PORT__]
+[1|Podcasts|__CGIPATH__/search/sortweek desc/%09collection:podcasts|__SERVER__|__PORT__]
+[1|Radio News Archive|__CGIPATH__/search/sortweek desc/%09collection:radio|__SERVER__|__PORT__]
+[1|Long Playing Records|__CGIPATH__/search/sortweek desc/%09collection:album_recordings|__SERVER__|__PORT__]
+[1|Various Cassette Tapes|__CGIPATH__/search/sortweek desc/%09collection:cassettetapes|__SERVER__|__PORT__]
+[1|Audiophile CD Collection|__CGIPATH__/search/sortweek desc/%09collection:acdc AND -collection:samples_only|__SERVER__|__PORT__]
DELETED src/audio/index.gph.m4
Index: src/audio/index.gph.m4
==================================================================
--- src/audio/index.gph.m4
+++ /dev/null
@@ -1,21 +0,0 @@
-include(config.m4)dnl
-# Audio
-
-[1|All Audio|__CGIPATH__/search/sortweek desc/%09mediatype:audio|__SERVER__|__PORT__]
-[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:audio|__SERVER__|__PORT__]
-[1|Live Music Archive|__CGIPATH__/search/sortweek desc/%09collection:etree AND mediatype:collection|__SERVER__|__PORT__]
-[1|Librivox Free Audiobooks|__CGIPATH__/search/sortweek desc/%09collection:librivoxaudio|__SERVER__|__PORT__]
-[1|Grateful Dead|__CGIPATH__/search/sortweek desc/%09collection:GratefulDead|__SERVER__|__PORT__]
-[1|Netlabels|__CGIPATH__/search/sortweek desc/%09collection:netlabels AND mediatype:collection|__SERVER__|__PORT__]
-[1|Old Time Radio|__CGIPATH__/search/sortweek desc/%09collection:oldtimeradio|__SERVER__|__PORT__]
-[1|78 RPMs & Cylinder Recordings|__CGIPATH__/search/sortweek desc/%09collection:78rpm AND mediatype:collection|__SERVER__|__PORT__]
-[1|Audio Books & Poetry|__CGIPATH__/search/sortweek desc/%09collection:audio_bookspoetry|__SERVER__|__PORT__]
-[1|Computers, Technology & Science|__CGIPATH__/search/sortweek desc/%09collection:audio_tech|__SERVER__|__PORT__]
-[1|Music, Arts & Culture|__CGIPATH__/search/sortweek desc/%09collection:audio_music AND mediatype:collection|__SERVER__|__PORT__]
-[1|News & Public Affairs|__CGIPATH__/search/sortweek desc/%09collection:audio_news|__SERVER__|__PORT__]
-[1|Spirituality & Religion|__CGIPATH__/search/sortweek desc/%09collection:audio_religion|__SERVER__|__PORT__]
-[1|Podcasts|__CGIPATH__/search/sortweek desc/%09collection:podcasts|__SERVER__|__PORT__]
-[1|Radio News Archive|__CGIPATH__/search/sortweek desc/%09collection:radio|__SERVER__|__PORT__]
-[1|Long Playing Records|__CGIPATH__/search/sortweek desc/%09collection:album_recordings|__SERVER__|__PORT__]
-[1|Various Cassette Tapes|__CGIPATH__/search/sortweek desc/%09collection:cassettetapes|__SERVER__|__PORT__]
-[1|Audiophile CD Collection|__CGIPATH__/search/sortweek desc/%09collection:acdc AND -collection:samples_only|__SERVER__|__PORT__]
ADDED src/books/gophermap.m4
Index: src/books/gophermap.m4
==================================================================
--- /dev/null
+++ src/books/gophermap.m4
@@ -0,0 +1,22 @@
+include(config.m4)dnl
+# Books
+
+[1|All Texts|__CGIPATH__/search/sortweek desc/%09mediatype:texts AND -access-restricted-item:true|__SERVER__|__PORT__]
+[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:texts AND -access-restricted-item:true|__SERVER__|__PORT__]
+[1|Smithsonian Libraries|__CGIPATH__/search/sortweek desc/%09collection:smithsonian|__SERVER__|__PORT__]
+[1|FEDLINK|__CGIPATH__/search/sortweek desc/%09collection:fedlink AND mediatype:collection|__SERVER__|__PORT__]
+[1|Genealogy|__CGIPATH__/search/sortweek desc/%09collection:genealogy AND mediatype:collection|__SERVER__|__PORT__]
+[1|Lincoln Collection|__CGIPATH__/search/sortweek desc/%09collection:lincolncollection|__SERVER__|__PORT__]
+[1|American Libraries|__CGIPATH__/search/sortweek desc/%09collection:americana AND mediatype:collection AND -access-restricted-item:true|__SERVER__|__PORT__]
+[1|Canadian Libraries|__CGIPATH__/search/sortweek desc/%09collection:toronto AND mediatype:collection AND -access-restricted-item:true|__SERVER__|__PORT__]
+[1|Universal Library|__CGIPATH__/search/sortweek desc/%09collection:universallibrary|__SERVER__|__PORT__]
+[1|Project Gutenberg|__CGIPATH__/search/sortweek desc/%09collection:gutenberg|__SERVER__|__PORT__]
+[1|Children's Library|__CGIPATH__/search/sortweek desc/%09collection:iacl AND -access-restricted-item:true|__SERVER__|__PORT__]
+[1|Biodiversity Heritage Library|__CGIPATH__/search/sortweek desc/%09collection:biodiversity AND mediatype:collection|__SERVER__|__PORT__]
+[1|Books By Language|__CGIPATH__/search/sortweek desc/%09collection:booksbylanguage AND mediatype:collection|__SERVER__|__PORT__]
+[1|Additional Collections|__CGIPATH__/search/sortweek desc/%09collection:additional_collections AND mediatype:collection|__SERVER__|__PORT__]
+[1|The Magazine Rack|__CGIPATH__/search/sortweek desc/%09collection:magazine_rack AND mediatype:collection|__SERVER__|__PORT__]
+[1|The Pulp Magazine Archive|__CGIPATH__/search/sortweek desc/%09collection:pulpmagazinearchive|__SERVER__|__PORT__]
+[1|Newspapers|__CGIPATH__/search/sortweek desc/%09collection:newspapers AND mediatype:collection|__SERVER__|__PORT__]
+[1|Comic Books & Graphic Novels|__CGIPATH__/search/sortweek desc/%09collection:comics AND -access-restricted-item:true|__SERVER__|__PORT__]
+[1|Zines|__CGIPATH__/search/sortweek desc/%09collection:zines AND -access-restricted-item:true|__SERVER__|__PORT__]
DELETED src/books/index.gph.m4
Index: src/books/index.gph.m4
==================================================================
--- src/books/index.gph.m4
+++ /dev/null
@@ -1,22 +0,0 @@
-include(config.m4)dnl
-# Books
-
-[1|All Texts|__CGIPATH__/search/sortweek desc/%09mediatype:texts AND -access-restricted-item:true|__SERVER__|__PORT__]
-[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:texts AND -access-restricted-item:true|__SERVER__|__PORT__]
-[1|Smithsonian Libraries|__CGIPATH__/search/sortweek desc/%09collection:smithsonian|__SERVER__|__PORT__]
-[1|FEDLINK|__CGIPATH__/search/sortweek desc/%09collection:fedlink AND mediatype:collection|__SERVER__|__PORT__]
-[1|Genealogy|__CGIPATH__/search/sortweek desc/%09collection:genealogy AND mediatype:collection|__SERVER__|__PORT__]
-[1|Lincoln Collection|__CGIPATH__/search/sortweek desc/%09collection:lincolncollection|__SERVER__|__PORT__]
-[1|American Libraries|__CGIPATH__/search/sortweek desc/%09collection:americana AND mediatype:collection AND -access-restricted-item:true|__SERVER__|__PORT__]
-[1|Canadian Libraries|__CGIPATH__/search/sortweek desc/%09collection:toronto AND mediatype:collection AND -access-restricted-item:true|__SERVER__|__PORT__]
-[1|Universal Library|__CGIPATH__/search/sortweek desc/%09collection:universallibrary|__SERVER__|__PORT__]
-[1|Project Gutenberg|__CGIPATH__/search/sortweek desc/%09collection:gutenberg|__SERVER__|__PORT__]
-[1|Children's Library|__CGIPATH__/search/sortweek desc/%09collection:iacl AND -access-restricted-item:true|__SERVER__|__PORT__]
-[1|Biodiversity Heritage Library|__CGIPATH__/search/sortweek desc/%09collection:biodiversity AND mediatype:collection|__SERVER__|__PORT__]
-[1|Books By Language|__CGIPATH__/search/sortweek desc/%09collection:booksbylanguage AND mediatype:collection|__SERVER__|__PORT__]
-[1|Additional Collections|__CGIPATH__/search/sortweek desc/%09collection:additional_collections AND mediatype:collection|__SERVER__|__PORT__]
-[1|The Magazine Rack|__CGIPATH__/search/sortweek desc/%09collection:magazine_rack AND mediatype:collection|__SERVER__|__PORT__]
-[1|The Pulp Magazine Archive|__CGIPATH__/search/sortweek desc/%09collection:pulpmagazinearchive|__SERVER__|__PORT__]
-[1|Newspapers|__CGIPATH__/search/sortweek desc/%09collection:newspapers AND mediatype:collection|__SERVER__|__PORT__]
-[1|Comic Books & Graphic Novels|__CGIPATH__/search/sortweek desc/%09collection:comics AND -access-restricted-item:true|__SERVER__|__PORT__]
-[1|Zines|__CGIPATH__/search/sortweek desc/%09collection:zines AND -access-restricted-item:true|__SERVER__|__PORT__]
Index: src/cgi.awk
==================================================================
--- src/cgi.awk
+++ src/cgi.awk
@@ -15,60 +15,38 @@
print "Service admin will reply when granted."
print extra
return
}
-function cgi_init( extra, ip) {
+function cgi_init( extra, ip, item) {
ip = ENVIRON["REMOTE_ADDR"]
-
- search = ARGV[1]
- arguments = ARGV[2]
- traversal = ARGV[5]
- selector = ARGV[6]
-
- if (geomyidae_version < 0.96) {
- input = arguments
-
- # geomyidae 0.69 doesn't populate selector, so do it manually
- path = ENVIRON["PATH_TRANSLATED"]
-
- # remove leading text up through beginning of cgipath
- if (match(path, cgipath)) {
- selector = substr(path, RSTART)
- }
-
- # remove trailing text from beginning of script name
- if (match(selector, ENVIRON["SCRIPT_NAME"])) {
- selector = substr(selector, 1, RSTART - 1)
- }
- selector = selector input
- } else {
- input = traversal
- }
-
- if (length(search) == 0 && match(input, /%09.*/)) {
- # This is a hack to include a search in the URL.
- # everything before %09 is considered arguments
- # everything after %09 is considered the search
- search = substr(input, RSTART + 3, RLENGTH - 3)
- args = substr(input, 0, RSTART - 1)
- } else {
- args = input
- }
-
- # query is everything after ? in the gopher selector
- # this is NOT the same as the gopher search string
- query = args
-
- # parse the path out of the selector
- path = selector
+ selector = ENVIRON["SELECTOR"]
+
+ # If i run `nc -l -p 7070`, then:
+ # lynx -dump '
gopher://127.0.0.1:7070/1/foo?bar'
+ # The nc output is equivalent to: `printf "/foo?bar\r\n"`
+ #
+ # If i run `nc -l -p 7070`, then:
+ # lynx -dump '
gopher://127.0.0.1:7070/7/foo?bar'
+ # The nc output is equivalent to: `printf "/foo\tbar\r\n"`
+ #
+ # Gophernicus sets QUERY_STRING and SEARCHREQUEST to "bar"
+ # in both cases. If the selector contains a ? at the same
+ # time that there is a search string, then the two will differ.
+ #
+ # See: <
https://boston.conman.org/2020/01/06.1>
+
+ search = ENVIRON["QUERY_STRING"]
+ if (search != ENVIRON["SEARCHREQUEST"]) {
+ searchreq = ENVIRON["SEARCHREQUEST"]
+ }
+
+ # parse the path out of the request
+ path = ENVIRON["REQUEST"]
if (substr(path, 1, length(cgipath)) == cgipath) {
path = substr(path, length(cgipath) + 1)
}
- if (match(path, /%09/)) {
- path = substr(path, 1, RSTART - 1)
- }
split(path, parts, "/")
topdir = parts[2]
if (pass_enabled) {
@@ -88,13 +66,13 @@
close(pass_list)
if (blocked) {
extra = ""
if (topdir == "details") {
extra = "\nIn the meanwhile, see:\n\n" \
- sprintf("[h|Web page|URL:%s/details/%s|%s|%s]\n",
+ sprintf("hWeb page\tURL:%s/details/%s\t%s\t%s\n",
api_ssl_endpoint,
- uri_encode(parts[3]),
+ uri_encode(search),
server,
port)
}
block_msg(ip, extra)
exit(0)
@@ -102,16 +80,10 @@
}
return
}
-function gph_encode(str, retval) {
- retval = str
- gsub(/\|/, "\\|", retval)
- return retval
-}
-
function uri_encode_init( i, c) {
for (i = 0; i <= 255; i++) {
c = sprintf("%c", i)
uri_encode_ord[c] = i
uri_encode_tab[i] = c
Index: src/config.awk
==================================================================
--- src/config.awk
+++ src/config.awk
@@ -4,10 +4,11 @@
api_ssl_endpoint = "__API_SSL_ENDPOINT__"
awk_ext = __AWK_EXT__
cache_db = "__CACHE_DB__"
cache_enabled = __CACHE_ENABLED__
cgipath = "__CGIPATH__"
+ docpath = "__DOCPATH__"
cmd_curl = "__CMD_CURL__"
cmd_enc = "__CMD_ENV__"
cmd_json2tsv = "__CMD_JSON2TSV__"
cmd_mktemp = "__CMD_MKTEMP__"
cmd_rm = "__CMD_RM__"
@@ -14,14 +15,13 @@
cmd_sqlite = "__CMD_SQLITE__"
cmd_strings = "__CMD_STRINGS__"
cmd_webdump = "__CMD_WEBDUMP__"
cmd_xargs = "__CMD_XARGS__"
contact = "__CONTACT__"
- geomyidae_version = __GEOMYIDAE_VERSION__
max_bin_size = __MAX_BIN_SIZE__
max_txt_size = __MAX_TXT_SIZE__
pass_enabled = __PASS_ENABLED__
pass_list = "__PASS_LIST__"
server = "__SERVER__"
port = "__PORT__"
return
}
ADDED src/details.m4
Index: src/details.m4
==================================================================
--- /dev/null
+++ src/details.m4
@@ -0,0 +1,230 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/details
+#
+# Show details for an item
+
+include(src/config.awk)
+incl(src/api.awk)
+incl(src/cache.awk)
+incl(src/cgi.awk)
+incl(src/sqlite.awk)
+incl(src/util.awk)
+incl(src/web.awk)
+
+function main( add_date, col, cols, cmd, creator, creators, descr, dir, i,
+ iaout, id, item_id, item_server, item_size, label, language,
+ license, output, pub_date, scanner, signature, str, thumb,
+ title, topic, topics, type, uploader_account, uploader_email, url)
+{
+ item_id = search
+
+ signature = "details/" item_id
+ str = cache_init(signature)
+ if (length(str) > 0) {
+ print str
+ return
+ }
+
+ iaout = gettemp()
+ output = cache_begin()
+
+ url = api_endpoint "/metadata/" item_id
+ api_request(url, "GET", iaout)
+
+ # format search results as a gopher directory (menu)
+ cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
+ FS = "\t"
+ cols = 0
+ delete col
+ add_date = ""
+ creators = 0
+ delete creator
+ descr = ""
+ dir = ""
+ id = ""
+ item_server = ""
+ item_size = 0
+ language = ""
+ license = ""
+ pub_date = ""
+ scanner = ""
+ thumb = ""
+ title = ""
+ topics = 0
+ delete topic
+ type = ""
+ uploader_account = ""
+ uploader_email = ""
+ while ((cmd | getline) > 0) {
+ if ($1 == ".dir" && $2 == "s") {
+ dir = $3
+ } else if ($1 == ".files[].name" && $2 == "s") {
+ if ($3 == "__ia_thumb.jpg") {
+ thumb = $3
+ }
+ } else if ($1 == ".item_size" && $2 == "n") {
+ item_size = $3
+ } else if ($1 == ".metadata.addeddate" && $2 == "s") {
+			add_date = $3
+ } else if ($1 == ".metadata.collection" && $2 == "s") {
+ cols++
+ col[cols] = $3
+ } else if ($1 == ".metadata.collection[]" && $2 == "s") {
+ cols++
+ col[cols] = $3
+ } else if ($1 == ".metadata.creator" && $2 == "s") {
+ creators++
+ creator[creators] = $3
+ } else if ($1 == ".metadata.creator[]" && $2 == "s") {
+ creators++
+ creator[creators] = $3
+ } else if ($1 == ".metadata.description" && $2 == "s") {
+ descr = $3
+ } else if ($1 == ".metadata.identifier" && $2 == "s") {
+ id = $3
+ } else if ($1 == ".metadata.language" && $2 == "s") {
+ language = $3
+ } else if ($1 == ".metadata.license" && $2 == "s") {
+ license = licenseurl[$3]
+ } else if ($1 == ".metadata.mediatype" && $2 == "s") {
+ type = $3
+ } else if ($1 == ".metadata.publicdate" && $2 == "s") {
+ pub_date = $3
+ } else if ($1 == ".metadata.scanner" && $2 == "s") {
+ scanner = $3
+ } else if ($1 == ".metadata.subject" && $2 == "s") {
+ topics++
+ topic[topics] = $3
+ } else if ($1 == ".metadata.subject[]" && $2 == "s") {
+ topics++
+ topic[topics] = $3
+ } else if ($1 == ".metadata.title" && $2 == "s") {
+ title = $3
+ } else if ($1 == ".metadata.uploader" && $2 == "s") {
+ uploader_email = $3
+ } else if ($1 == ".server" && $2 == "s") {
+ item_server = $3
+ }
+ }
+ close(cmd)
+
+ if (length(id) == 0) {
+ print_not_found(output, url)
+ cache_end()
+ unlink(iaout)
+ return
+ }
+
+ print shorten(title, 70) >>output
+ if (creators == 1) {
+ label = "by " shorten(creator[1], 70)
+		printf "1%s\t%s/search?creator:(%s)\t%s\t%s\n", label,
+ cgipath, creator[1], server, port >>output
+ } else if (creators > 1) {
+ printf "\nby:\n" >>output
+ for (i = 1; i <= creators; i++) {
+ label = shorten(creator[i], 70)
+ printf "1%s\t%s/search?creator:(%s)\t%s\t%s\n", label,
+ cgipath, creator[i], server, port >>output
+ }
+ printf "\n" >>output
+ }
+ if (length(thumb) > 0) {
+ url = sprintf("http://%s%s/%s", item_server, dir, thumb)
+ printf "IThumbnail\t%s/raw?%s\t%s\t%s\n", cgipath, url,
+ server, port >>output
+ }
+
+ printf "1Download\t%s/download?%s\t%s\t%s\n", cgipath, item_id,
+ server, port >>output
+ printf "hWeb page\tURL:%s/details/%s\t%s\t%s\n",
+ api_ssl_endpoint, uri_encode(id), server, port >>output
+ print "" >>output
+
+ print_html(output, descr)
+
+ print "" >>output
+ if (length(add_date) > 0) {
+ printf "%-20s %s\n", "Date Added:", add_date >>output
+ }
+ if (pub_date != add_date) {
+ printf "%-20s %s\n", "Date Published:", pub_date >>output
+ }
+ printf "%-20s %s\n", "Identifier:", id >>output
+ if (item_size > 0) {
+ printf "%-20s %d\n", "Item Size:", item_size >>output
+ }
+ if (length(language) > 0) {
+ printf "%-20s %s\n", "Language:", language >>output
+ }
+ if (length(license) > 0) {
+ printf "%-20s %s\n", "License:", license >>output
+ }
+ printf "%-20s %s\n", "Media Type:", type >>output
+
+ if (topics > 0) {
+ print "" >>output
+ print "# Topics" >>output
+ for (i = 1; i <= topics; i++) {
+ label = shorten(topic[i], 40)
+ printf "1%s\t%s/search?subject:(%s)\t%s\t%s\n", label,
+ cgipath, topic[i], server, port >>output
+ }
+ }
+
+ # scrape uploader name from item web page HTML
+ url = api_ssl_endpoint "/details/" item_id
+ api_request(url, "GET", iaout)
+ while ((getline <iaout) > 0) {
+ if (/item-upload-info__uploader-name/ &&
+ match($0, /\/details\/[^"]*"/))
+ {
+ uploader_account = substr($0, RSTART + 9, RLENGTH - 10)
+ }
+ }
+ close(iaout)
+
+ if (cols > 0) {
+ print "" >>output
+ print "# Collections" >>output
+ for (i = 1; i <= cols; i++) {
+ label = shorten(col[i], 40)
+ printf "1%s\t%s/search?collection:(%s)\t%s\t%s\n",
+ label, cgipath, col[i], server, port >>output
+ }
+ }
+
+ print "" >>output
+ print "# Uploaded by" >>output
+ if (length(uploader_account) > 0) {
+ label = shorten(uploader_account, 70)
+ printf "1%s\t%s/account?%s/%s\t%s\t%s\n", label, cgipath,
+ uploader_account, uploader_email, server, port >>output
+ } else {
+ label = shorten(uploader_email, 70)
+ printf "1%s\t%s/search?uploader:%s\t%s\t%s\n", label,
+ cgipath, uploader_email, server, port >>output
+ }
+
+ print "" >>output
+ print "# Similar items" >>output
+ printf "1View similar items\t%s/search?similar:%s\t%s\t%s\n",
+ cgipath, item_id, server, port >>output
+
+ print "" >>output
+ printf "1PHAROS\t%s\t%s\t%s\n", docpath, server, port >>output
+
+ cache_end()
+ unlink(iaout)
+ return
+}
+
+BEGIN {
+ config_init()
+
+ uri_encode_init()
+ cgi_init()
+ main()
+}
DELETED src/details/index.dcgi.m4
Index: src/details/index.dcgi.m4
==================================================================
--- src/details/index.dcgi.m4
+++ /dev/null
@@ -1,230 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# details/index.dcgi
-#
-# Show details for an item
-
-include(src/config.awk)
-incl(src/api.awk)
-incl(src/cache.awk)
-incl(src/cgi.awk)
-incl(src/sqlite.awk)
-incl(src/util.awk)
-incl(src/web.awk)
-
-function main( add_date, col, cols, cmd, creator, descr, dir, i,
- iaout, id, item_id, item_server, item_size, label, language,
- license, output, pub_date, scanner, signature, str, thumb, title,
- topic, topics, type, uploader_account, uploader_email, url)
-{
- item_id = parts[3]
-
- signature = "details/" item_id
- str = cache_init(signature)
- if (length(str) > 0) {
- print str
- return
- }
-
- iaout = gettemp()
- output = cache_begin()
-
- url = api_endpoint "/metadata/" item_id
- api_request(url, "GET", iaout)
-
- # format search results as a gopher directory (menu)
- cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
- FS = "\t"
- cols = 0
- delete col
- add_date = ""
- creators = 0
- delete creator
- descr = ""
- dir = ""
- id = ""
- item_server = ""
- item_size = 0
- language = ""
- license = ""
- pub_date = ""
- scanner = ""
- thumb = ""
- title = ""
- topics = 0
- delete topic
- type = ""
- uploader_account = ""
- uploader_email = ""
- while ((cmd | getline) > 0) {
- if ($1 == ".dir" && $2 == "s") {
- dir = $3
- } else if ($1 == ".files[].name" && $2 == "s") {
- if ($3 == "__ia_thumb.jpg") {
- thumb = $3
- }
- } else if ($1 == ".item_size" && $2 == "n") {
- item_size = $3
- } else if ($1 == ".metadata.addeddate" && $2 == "s") {
- added_date = $3
- } else if ($1 == ".metadata.collection" && $2 == "s") {
- cols++
- col[cols] = $3
- } else if ($1 == ".metadata.collection[]" && $2 == "s") {
- cols++
- col[cols] = $3
- } else if ($1 == ".metadata.creator" && $2 == "s") {
- creators++
- creator[creators] = $3
- } else if ($1 == ".metadata.creator[]" && $2 == "s") {
- creators++
- creator[creators] = $3
- } else if ($1 == ".metadata.description" && $2 == "s") {
- descr = $3
- } else if ($1 == ".metadata.identifier" && $2 == "s") {
- id = $3
- } else if ($1 == ".metadata.language" && $2 == "s") {
- language = $3
- } else if ($1 == ".metadata.license" && $2 == "s") {
- license = licenseurl[$3]
- } else if ($1 == ".metadata.mediatype" && $2 == "s") {
- type = $3
- } else if ($1 == ".metadata.publicdate" && $2 == "s") {
- pub_date = $3
- } else if ($1 == ".metadata.scanner" && $2 == "s") {
- scanner = $3
- } else if ($1 == ".metadata.subject" && $2 == "s") {
- topics++
- topic[topics] = $3
- } else if ($1 == ".metadata.subject[]" && $2 == "s") {
- topics++
- topic[topics] = $3
- } else if ($1 == ".metadata.title" && $2 == "s") {
- title = $3
- } else if ($1 == ".metadata.uploader" && $2 == "s") {
- uploader_email = $3
- } else if ($1 == ".server" && $2 == "s") {
- item_server = $3
- }
- }
- close(cmd)
-
- if (length(id) == 0) {
- print_not_found(output, url)
- cache_end()
- unlink(iaout)
- return
- }
-
- print shorten(title, 70) >>output
- if (creators == 1) {
- label = "by " shorten(creator[1], 70)
- printf "[1|%s|%s/search/%%09creator:(%s)|%s|%s]\n", label,
- cgipath, creator[1], server, port >>output
- } else if (creators > 1) {
- printf "\nby:\n" >>output
- for (i = 1; i <= creators; i++) {
- label = shorten(creator[i], 70)
- printf "[1|%s|%s/search/%%09creator:(%s)|%s|%s]\n", label,
- cgipath, creator[i], server, port >>output
- }
- printf "\n" >>output
- }
- if (length(thumb) > 0) {
- url = sprintf("http://%s%s/%s", item_server, dir, thumb)
- printf "[I|Thumbnail|%s/raw/%%09%s|%s|%s]\n",
- cgipath, url, server, port >>output
- }
-
- printf "[1|Download|%s/download/%s|%s|%s]\n", cgipath,
- item_id, server, port >>output
- printf "[h|Web page|URL:%s/details/%s|%s|%s]\n",
- api_ssl_endpoint, uri_encode(id), server, port >>output
- print "" >>output
-
- print_html(output, descr)
-
- print "" >>output
- if (length(add_date) > 0) {
- printf "%-20s %s\n", "Date Added:", add_date >>output
- }
- if (pub_date != add_date) {
- printf "%-20s %s\n", "Date Published:", pub_date >>output
- }
- printf "%-20s %s\n", "Identifier:", id >>output
- if (item_size > 0) {
- printf "%-20s %d\n", "Item Size:", item_size >>output
- }
- if (length(language) > 0) {
- printf "%-20s %s\n", "Language:", language >>output
- }
- if (length(license) > 0) {
- printf "%-20s %s\n", "License:", license >>output
- }
- printf "%-20s %s\n", "Media Type:", type >>output
-
- if (topics > 0) {
- print "" >>output
- print "# Topics" >>output
- for (i = 1; i <= topics; i++) {
- label = shorten(topic[i], 40)
- printf "[1|%s|%s/search/%%09subject:(%s)|%s|%s]\n", label,
- cgipath, topic[i], server, port >>output
- }
- }
-
- # scrape uploader name from item web page HTML
- url = api_ssl_endpoint "/details/" item_id
- api_request(url, "GET", iaout)
- while ((getline <iaout) > 0) {
- if (/item-upload-info__uploader-name/ &&
- match($0, /\/details\/[^"]*"/))
- {
- uploader_account = substr($0, RSTART + 9, RLENGTH - 10)
- }
- }
- close(iaout)
-
- if (cols > 0) {
- print "" >>output
- print "# Collections" >>output
- for (i = 1; i <= cols; i++) {
- label = shorten(col[i], 40)
- printf "[1|%s|%s/search/%%09collection:(%s)|%s|%s]\n",
- label, cgipath, col[i], server, port >>output
- }
- }
-
- print "" >>output
- print "# Uploaded by" >>output
- if (length(uploader_account) > 0) {
- label = shorten(uploader_account, 70)
- printf "[1|%s|%s/account/%s%%09%s|%s|%s]\n", label, cgipath,
- uploader_account, uploader_email, server, port >>output
- } else {
- label = shorten(uploader_email, 70)
- printf "[1|%s|%s/search/%%09uploader:%s|%s|%s]\n", label,
- cgipath, uploader_email, server, port >>output
- }
-
- print "" >>output
- print "# Similar items" >>output
- printf "[1|View similar items|%s/search/%%09similar:%s|%s|%s]\n",
- cgipath, item_id, server, port >>output
-
- print "" >>output
- printf "[1|PHAROS|%s|%s|%s]\n", cgipath, server, port >>output
-
- cache_end()
- unlink(iaout)
- return
-}
-
-BEGIN {
- config_init()
-
- uri_encode_init()
- cgi_init()
- main()
-}
ADDED src/download.m4
Index: src/download.m4
==================================================================
--- /dev/null
+++ src/download.m4
@@ -0,0 +1,103 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/download
+#
+# Show file downloads using either direct http or gopher proxy links
+
+include(src/config.awk)
+incl(src/api.awk)
+incl(src/cgi.awk)
+incl(src/util.awk)
+
+function main( cmd, dir, files, file_size, format, i, iaout, is_archive,
+ is_proxy, item_server, label, mtime, name, source, url)
+{
+ if (topdir == "download") {
+ is_proxy = 1
+ } else {
+ # topdir == "direct"
+ is_proxy = 0
+ }
+
+ iaout = gettemp()
+
+ url = api_endpoint "/metadata/" search
+ api_request(url, "GET", iaout)
+
+ # format search results as a gopher directory (menu)
+ cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
+ FS = "\t"
+ dir = ""
+ files = 0
+ delete format
+ item_server = ""
+ delete mtime
+ delete name
+ delete file_size
+ delete source
+
+ while ((cmd | getline) > 0) {
+ if ($1 == ".dir" && $2 == "s") {
+ dir = $3
+ } else if ($1 == ".files[]" && $2 == "o") {
+ files++
+ } else if ($1 == ".files[].format" && $2 == "s") {
+ format[files] = $3
+ } else if ($1 == ".files[].mtime" && $2 == "s") {
+ mtime[files] = $3
+ } else if ($1 == ".files[].name" && $2 == "s") {
+ name[files] = $3
+ } else if ($1 == ".files[].size" && $2 == "s") {
+ file_size[files] = $3
+ } else if ($1 == ".files[].source" && $2 == "s") {
+ source[files] = $3
+ } else if ($1 == ".server" && $2 == "s") {
+ item_server = $3
+ }
+ }
+ close(cmd)
+
+ for (i = 1; i <= files; i++) {
+ label = sprintf("%s %s %s",
+ shorten_left(name[i], 40),
+ strftime("%Y-%m-%d %H:%M", mtime[i]),
+ human_size(file_size[i]))
+ url = sprintf("http://%s%s/%s", item_server, dir, name[i])
+ if (is_proxy) {
+ if (max_bin_size > 0 && file_size[i] > max_bin_size * size_mb) {
+ printf "h%s\tURL:%s\t%s\t%s\n", label, uri_encode(url),
+ server, port
+ } else {
+ printf "1%s\t%s/links?%s\t%s\t%s\n", label, cgipath,
+ url, server, port
+ }
+ } else {
+ printf "h%s\tURL:%s\t%s\t%s\n", label, uri_encode(url),
+ server, port
+ }
+ is_archive = detect_archive(url)
+ if (is_archive) {
+ url = sprintf("http://%s/view_archive.php?archive=%s/%s",
+ item_server, dir, name[i])
+ printf "h%s (View Contents)\tURL:%s\t%s\t%s\n",
+ shorten_left(name[i], 40), uri_encode(url),
+ server, port
+ }
+ }
+
+ printf "1Downloads via http\t%s/direct?%s\t%s\t%s\n", cgipath,
+ search, server, port
+
+ unlink(iaout)
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ uri_encode_init()
+ util_init()
+ main()
+}
DELETED src/download/index.dcgi.m4
Index: src/download/index.dcgi.m4
==================================================================
--- src/download/index.dcgi.m4
+++ /dev/null
@@ -1,106 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# download/index.dcgi
-#
-# Show file downloads using either direct http or gopher proxy links
-
-include(src/config.awk)
-incl(src/api.awk)
-incl(src/cgi.awk)
-incl(src/util.awk)
-
-function main(cmd, dir, files, file_size, format, iaout, is_archive,
- is_proxy, item_server, label, mtime, name, source, url)
-{
- dir = parts[2]
- item_id = parts[3]
-
- if (dir == "download") {
- is_proxy = 1
- } else {
- # dir == "direct"
- is_proxy = 0
- }
-
- iaout = gettemp()
-
- url = api_endpoint "/metadata/" item_id
- api_request(url, "GET", iaout)
-
- # format search results as a gopher directory (menu)
- cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
- FS = "\t"
- dir = ""
- files = 0
- delete format
- item_server = ""
- delete mtime
- delete name
- delete file_size
- delete source
-
- while ((cmd | getline) > 0) {
- if ($1 == ".dir" && $2 == "s") {
- dir = $3
- } else if ($1 == ".files[]" && $2 == "o") {
- files++
- } else if ($1 == ".files[].format" && $2 == "s") {
- format[files] = $3
- } else if ($1 == ".files[].mtime" && $2 == "s") {
- mtime[files] = $3
- } else if ($1 == ".files[].name" && $2 == "s") {
- name[files] = $3
- } else if ($1 == ".files[].size" && $2 == "s") {
- file_size[files] = $3
- } else if ($1 == ".files[].source" && $2 == "s") {
- source[files] = $3
- } else if ($1 == ".server" && $2 == "s") {
- item_server = $3
- }
- }
- close(cmd)
-
- for (i = 1; i <= files; i++) {
- label = sprintf("%s %s %s",
- shorten_left(name[i], 40),
- strftime("%Y-%m-%d %H:%M", mtime[i]),
- human_size(file_size[i]))
- url = sprintf("http://%s%s/%s", item_server, dir, name[i])
- if (is_proxy) {
- if (max_bin_size > 0 && file_size[i] > max_bin_size * size_mb) {
- printf "[h|%s|URL:%s|%s|%s]\n", label, uri_encode(url),
- server, port
- } else {
- printf "[1|%s|%s/links/%%09%s|%s|%s]\n", label, cgipath,
- url, server, port
- }
- } else {
- printf "[h|%s|URL:%s|%s|%s]\n", label, uri_encode(url),
- server, port
- }
- is_archive = detect_archive(url)
- if (is_archive) {
- url = sprintf("http://%s/view_archive.php?archive=%s/%s",
- item_server, dir, name[i])
- printf "[h|%s (View Contents)|URL:%s|%s|%s]\n",
- shorten_left(name[i], 40), uri_encode(url),
- server, port
- }
- }
-
- printf "[1|Downloads via http|%s/direct/%s|%s|%s]\n", cgipath,
- item_id, server, port
-
- unlink(iaout)
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- uri_encode_init()
- util_init()
- main()
-}
ADDED src/gophermap.m4
Index: src/gophermap.m4
==================================================================
--- /dev/null
+++ src/gophermap.m4
@@ -0,0 +1,26 @@
+include(config.m4)dnl
+i .n. |
+i /___\ _.---. \ _ /
+i [|||] (_._ ) )--;_) =-
+i [___] '---'.__,' \
+i }-=-{ |
+i |-" |
+i |.-"| p
+i~^=~^~-|_.-|~^-~^~ ~^~ -^~^~|\ ~^-~^~-
+i^ .=.| _.|__ ^ ~ /| \
+i ~ /:. \" _|_/\ ~ /_|__\ ^
+i.-/::. | |""|-._ ^ ~~~~
+i `===-'-----'""` '-. ~
+i PHAROS __.-' ^
+i
+iGopher proxy to Internet Archive.
+i
+7Search __CGIPATH__/search __SERVER__ __PORT__
+1Advanced Search __CGIPATH__/wizard/step1 __SERVER__ __PORT__
+1Books __DOCPATH__/books/ __SERVER__ __PORT__
+1Video __DOCPATH__/video/ __SERVER__ __PORT__
+1Audio __DOCPATH__/audio/ __SERVER__ __PORT__
+1Software __DOCPATH__/software/ __SERVER__ __PORT__
+1Images __DOCPATH__/images/ __SERVER__ __PORT__
+0About PHAROS __DOCPATH__/about.txt __SERVER__ __PORT__
+hSource Code URL:
https://chiselapp.com/user/bencollver/repository/pharos __SERVER__ __PORT__
ADDED src/images/gophermap.m4
Index: src/images/gophermap.m4
==================================================================
--- /dev/null
+++ src/images/gophermap.m4
@@ -0,0 +1,14 @@
+include(config.m4)dnl
+# Images
+
+[1|All Images|__CGIPATH__/search/sortweek desc/%09mediatype:image|__SERVER__|__PORT__]
+[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:image|__SERVER__|__PORT__]
+[1|Metropolitan Museum of Art|__CGIPATH__/search/sortweek desc/%09collection:metropolitanmuseumofart-gallery|__SERVER__|__PORT__]
+[1|Cleveland Museum of Art|__CGIPATH__/search/sortweek desc/%09collection:clevelandart|__SERVER__|__PORT__]
+[1|Flickr Commons|__CGIPATH__/search/sortweek desc/%09collection:flickrcommons|__SERVER__|__PORT__]
+[1|Occupy Wallstreet Flickr|__CGIPATH__/search/sortweek desc/%09collection:flickr-ows|__SERVER__|__PORT__]
+[1|Cover Art|__CGIPATH__/search/sortweek desc/%09collection:coverartarchive|__SERVER__|__PORT__]
+[1|USGS Maps|__CGIPATH__/search/sortweek desc/%09collection:maps_usgs|__SERVER__|__PORT__]
+[1|NASA Images|__CGIPATH__/search/sortweek desc/%09collection:nasa|__SERVER__|__PORT__]
+[1|Solar System Collection|__CGIPATH__/search/sortweek desc/%09collection:solarsystemcollection|__SERVER__|__PORT__]
+[1|Ames Research Center|__CGIPATH__/search/sortweek desc/%09collection:amesresearchcenterimagelibrary|__SERVER__|__PORT__]
DELETED src/images/index.gph.m4
Index: src/images/index.gph.m4
==================================================================
--- src/images/index.gph.m4
+++ /dev/null
@@ -1,14 +0,0 @@
-include(config.m4)dnl
-# Images
-
-[1|All Images|__CGIPATH__/search/sortweek desc/%09mediatype:image|__SERVER__|__PORT__]
-[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:image|__SERVER__|__PORT__]
-[1|Metropolitan Museum of Art|__CGIPATH__/search/sortweek desc/%09collection:metropolitanmuseumofart-gallery|__SERVER__|__PORT__]
-[1|Cleveland Museum of Art|__CGIPATH__/search/sortweek desc/%09collection:clevelandart|__SERVER__|__PORT__]
-[1|Flickr Commons|__CGIPATH__/search/sortweek desc/%09collection:flickrcommons|__SERVER__|__PORT__]
-[1|Occupy Wallstreet Flickr|__CGIPATH__/search/sortweek desc/%09collection:flickr-ows|__SERVER__|__PORT__]
-[1|Cover Art|__CGIPATH__/search/sortweek desc/%09collection:coverartarchive|__SERVER__|__PORT__]
-[1|USGS Maps|__CGIPATH__/search/sortweek desc/%09collection:maps_usgs|__SERVER__|__PORT__]
-[1|NASA Images|__CGIPATH__/search/sortweek desc/%09collection:nasa|__SERVER__|__PORT__]
-[1|Solar System Collection|__CGIPATH__/search/sortweek desc/%09collection:solarsystemcollection|__SERVER__|__PORT__]
-[1|Ames Research Center|__CGIPATH__/search/sortweek desc/%09collection:amesresearchcenterimagelibrary|__SERVER__|__PORT__]
DELETED src/index.gph.m4
Index: src/index.gph.m4
==================================================================
--- src/index.gph.m4
+++ /dev/null
@@ -1,26 +0,0 @@
-include(config.m4)dnl
- .n. |
- /___\ _.---. \ _ /
- [|||] (_._ ) )--;_) =-
- [___] '---'.__,' \
- }-=-{ |
- |-" |
- |.-"| p
-~^=~^~-|_.-|~^-~^~ ~^~ -^~^~|\ ~^-~^~-
-^ .=.| _.|__ ^ ~ /| \
- ~ /:. \" _|_/\ ~ /_|__\ ^
-.-/::. | |""|-._ ^ ~~~~
- `===-'-----'""` '-. ~
- PHAROS __.-' ^
-
-Gopher proxy to Internet Archive.
-
-[7|Search|__CGIPATH__/search/|__SERVER__|__PORT__]
-[1|Advanced Search|__CGIPATH__/wizard/step1|__SERVER__|__PORT__]
-[1|Books|__CGIPATH__/books/|__SERVER__|__PORT__]
-[1|Video|__CGIPATH__/video/|__SERVER__|__PORT__]
-[1|Audio|__CGIPATH__/audio/|__SERVER__|__PORT__]
-[1|Software|__CGIPATH__/software/|__SERVER__|__PORT__]
-[1|Images|__CGIPATH__/images/|__SERVER__|__PORT__]
-[0|About PHAROS|__CGIPATH__/about.txt|__SERVER__|__PORT__]
-[h|Source Code|URL:
https://chiselapp.com/user/bencollver/repository/pharos|__SERVER__|__PORT__]
ADDED src/list.m4
Index: src/list.m4
==================================================================
--- /dev/null
+++ src/list.m4
@@ -0,0 +1,229 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/list
+#
+# Show a specific list
+
+include(src/config.awk)
+incl(src/api.awk)
+incl(src/cache.awk)
+incl(src/cgi.awk)
+incl(src/sqlite.awk)
+incl(src/util.awk)
+
+function main( acct, client_url, cmd, count, creator, iaout, id,
+ is_private, items, label, list_id, name, name_slug, numfound,
+ order, order_name, order_names, order_param, output, page, pages,
+ prefix, rows, sort_param, signature, str, title, type, url,
+ user_query)
+{
+ order_names["creator"] = "creatorSorter"
+ order_names["date"] = "date"
+ order_names["title"] = "titleSorter"
+ order_names["week"] = "week"
+
+ rows = 15
+ page = 1
+ order = ""
+ sort_param = ""
+
+ # parse out page number and sort order
+ numfound = 1
+ while (numfound == 1) {
+ if (match(search, /\/rows[0-9][0-9]*$/)) {
+ rows = substr(search, RSTART + 5)
+ search = substr(search, 1, RSTART - 1)
+ } else if (match(search, /\/page[0-9][0-9]*$/)) {
+ page = substr(search, RSTART + 5)
+ search = substr(search, 1, RSTART - 1)
+ } else if (match(search, /\/sort[^\/]*$/)) {
+ if (length(order) == 0) {
+ sort_param = substr(search, RSTART + 1)
+ order = substr(sort_param, 5)
+ }
+ search = substr(search, 1, RSTART - 1)
+ } else {
+ numfound = 0
+ }
+ }
+
+ split(search, parts, "/")
+ acct = parts[1]
+ list_id = parts[2]
+
+	signature = sprintf("list/%s", ENVIRON["QUERY_STRING"])
+ str = cache_init(signature)
+ if (length(str) > 0) {
+ print str
+ return
+ }
+
+ output = cache_begin()
+ iaout = gettemp()
+
+ url = api_ssl_endpoint "/services/users/" acct "/lists/" list_id
+ api_request(url, "GET", iaout)
+
+ # fetch identifiers of list members
+
+ cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
+ FS = "\t"
+ id = ""
+ is_private = 0
+ name = ""
+ numfound = 0
+ user_query = ""
+
+ while ((cmd | getline) > 0) {
+ if ($1 == ".value.list_name" && $2 == "s") {
+ name = $3
+ is_private = 0
+ } else if ($1 == ".value.is_private" && $2 == "b") {
+ if ($3 == "true") {
+ is_private = 1
+ }
+ } else if ($1 == ".value.members[].identifier" && $2 == "s") {
+ if (!is_private) {
+ id = $3
+ numfound++
+ if (length(user_query) == 0) {
+ user_query = id
+ } else {
+ user_query = user_query "+OR+" id
+ }
+ }
+ }
+ }
+ close(cmd)
+ unlink(iaout)
+
+ # get metadata of list member items
+
+ name_slug = uri_encode(name)
+ gsub(/%20/, "-", name_slug)
+ client_url = api_ssl_endpoint "/details/" acct "/lists/" list_id \
+ "/" name_slug
+ order_param = ""
+ if (length(order) > 0) {
+ split(order, parts, " ")
+ order_name = order_names[parts[1]]
+ if (length(order_name) > 0) {
+ if (parts[2] == "desc") {
+ client_url = client_url "?-" parts[1]
+ } else {
+ client_url = client_url "?" parts[1]
+ }
+ order_param = "&sort=" uri_encode(order_name ":" parts[2])
+ }
+ }
+ url = api_ssl_endpoint "/services/search/beta/page_production/" \
+ "?user_query=identifier:(" user_query ")" \
+ "&hits_per_page=" rows \
+ "&page=" page \
+ order_param \
+ "&aggregations=false" \
+ "&client_url=" client_url
+ api_request(url, "GET", iaout)
+
+ pages = int(numfound / rows)
+ if (numfound % rows != 0) {
+ pages++
+ }
+
+ # format as a gopher directory (menu)
+
+ print acct "'s Lists" >>output
+ print "" >>output
+ printf "# List: %s, page %d of %d\n", name, page, pages >>output
+ print "" >>output
+
+ cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
+ FS = "\t"
+ count = 0
+ creator = ""
+ id = ""
+ title = ""
+ type = ""
+ while ((cmd | getline) > 0) {
+ if ($1 == ".response.body.hits.hits[].fields.creator[]" &&
+ $2 == "s" && length(creator) == 0)
+ {
+ creator = $3
+ } else if ($1 == ".response.body.hits.hits[].fields.identifier" &&
+ $2 == "s")
+ {
+ id = $3
+ } else if ($1 == ".response.body.hits.hits[].fields.mediatype" &&
+ $2 == "s")
+ {
+ type = $3
+ } else if ($1 == ".response.body.hits.hits[].fields.title" &&
+ $2 == "s")
+ {
+ title = $3
+ } else if ($1 == ".response.body.hits.hits[]._score") {
+ # the _score field happens to be toward the end of each item
+ if (length(title) > 0) {
+ if (length(creator) > 0) {
+ label = sprintf("[%s] %s by %s", mediatype[type],
+ shorten(title, 40), shorten(creator, 18))
+ } else {
+ label = sprintf("[%s] %s", mediatype[type],
+ shorten(title, 58))
+ }
+ printf "1%s\t%s/details?%s\t%s\t%s\n", label, cgipath, id,
+ server, port >>output
+ count++
+ }
+ creator = ""
+ descr = ""
+ id = ""
+ type = ""
+ }
+ }
+ close(cmd)
+
+ print "" >>output
+
+ # only show "page back" if the user is past page 1
+ if (page > 1) {
+		prefix = sprintf("/page%d/rows%d", page - 1, rows) (sort_param == "" ? "" : "/" sort_param)
+ printf "1[<<] Page %d\t%s/list?%s/%d%s\t%s\t%s\n",
+ page - 1, cgipath, acct, list_id, prefix, server,
+ port >>output
+ }
+
+ # only show "next page" if the current page is completely full
+ if (count == rows) {
+		prefix = sprintf("/page%d/rows%d", page + 1, rows) (sort_param == "" ? "" : "/" sort_param)
+ printf "1[>>] Page %d\t%s/list?%s/%d%s\t%s\t%s\n",
+ page + 1, cgipath, acct, list_id, prefix, server,
+ port >>output
+ }
+
+ # only show "sort" if there's more than one item to sort
+ if (numfound > 1) {
+ printf "1[^v] Sort\t%s/listsort?%s/%d\t%s\t%s\n", cgipath,
+ acct, list_id, server, port >>output
+ }
+
+ printf "1Account %s\t%s/account?%s\t%s\t%s\n", acct, cgipath,
+ acct, server, port >>output
+
+ print "" >>output
+ printf "1PHAROS\t%s\t%s\t%s\n", docpath, server, port >>output
+
+ cache_end()
+ unlink(iaout)
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ uri_encode_init()
+ util_init()
+ main()
+}
DELETED src/list/index.dcgi.m4
Index: src/list/index.dcgi.m4
==================================================================
--- src/list/index.dcgi.m4
+++ /dev/null
@@ -1,220 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# list/index.dcgi
-#
-# Show a specific list
-
-include(src/config.awk)
-incl(src/api.awk)
-incl(src/cache.awk)
-incl(src/cgi.awk)
-incl(src/sqlite.awk)
-incl(src/util.awk)
-
-function main( acct, client_url, cmd, count, creator, iaout, id,
- is_private, items, label, list_id, name, name_slug, numfound,
- order, order_name, order_names, order_param, output, page, pages,
- rows, query, sort_param, signature, str, title, type, url)
-{
- order_names["creator"] = "creatorSorter"
- order_names["date"] = "date"
- order_names["title"] = "titleSorter"
- order_names["week"] = "week"
-
- rows = 15
- page = 1
- order = ""
- sort_param = ""
-
- # parse out page number and sort order
- for (i in parts) {
- if (parts[i] ~ /^rows[0-9][0-9]*$/) {
- rows = substr(parts[i], 5)
- } else if (parts[i] ~ /^page[0-9][0-9]*$/) {
- page = substr(parts[i], 5)
- } else if (parts[i] ~ /^sort/) {
- if (length(order) == 0) {
- sort_param = parts[i]
- order = substr(parts[i], 5)
- }
- }
- }
-
- split(search, parts, "/")
- acct = parts[1]
- list_id = parts[2]
-
- signature = sprintf("list/%s/%s", input, search)
- str = cache_init(signature)
- if (length(str) > 0) {
- print str
- return
- }
-
- output = cache_begin()
- iaout = gettemp()
-
- url = api_ssl_endpoint "/services/users/" acct "/lists/" list_id
- api_request(url, "GET", iaout)
-
- # fetch identifiers of list members
-
- cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
- FS = "\t"
- id = ""
- is_private = 0
- name = ""
- numfound = 0
- query = ""
-
- while ((cmd | getline) > 0) {
- if ($1 == ".value.list_name" && $2 == "s") {
- name = $3
- is_private = 0
- } else if ($1 == ".value.is_private" && $2 == "b") {
- if ($3 == "true") {
- is_private = 1
- }
- } else if ($1 == ".value.members[].identifier" && $2 == "s") {
- if (!is_private) {
- id = $3
- numfound++
- if (length(query) == 0) {
- query = id
- } else {
- query = query "+OR+" id
- }
- }
- }
- }
- close(cmd)
- unlink(iaout)
-
- # get metadata of list member items
-
- name_slug = uri_encode(name)
- gsub(/%20/, "-", name_slug)
- client_url = api_ssl_endpoint "/details/" acct "/lists/" list_id \
- "/" name_slug
- order_param = ""
- if (length(order) > 0) {
- split(order, parts, " ")
- order_name = order_names[parts[1]]
- if (length(order_name) > 0) {
- if (parts[2] == "desc") {
- client_url = client_url "?-" parts[1]
- } else {
- client_url = client_url "?" parts[1]
- }
- order_param = "&sort=" uri_encode(order_name ":" parts[2])
- }
- }
- url = api_ssl_endpoint "/services/search/beta/page_production/" \
- "?user_query=identifier:(" query ")" \
- "&hits_per_page=" rows \
- "&page=" page \
- order_param \
- "&aggregations=false" \
- "&client_url=" client_url
- api_request(url, "GET", iaout)
-
- pages = int(numfound / rows)
- if (numfound % rows != 0) {
- pages++
- }
-
- # format as a gopher directory (menu)
-
- print acct "'s Lists" >>output
- print "" >>output
- printf "# List: %s, page %d of %d\n", name, page, pages >>output
- print "" >>output
-
- cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
- FS = "\t"
- count = 0
- creator = ""
- id = ""
- title = ""
- type = ""
- while ((cmd | getline) > 0) {
- if ($1 == ".response.body.hits.hits[].fields.creator[]" &&
- $2 == "s" && length(creator) == 0)
- {
- creator = $3
- } else if ($1 == ".response.body.hits.hits[].fields.identifier" &&
- $2 == "s")
- {
- id = $3
- } else if ($1 == ".response.body.hits.hits[].fields.mediatype" &&
- $2 == "s")
- {
- type = $3
- } else if ($1 == ".response.body.hits.hits[].fields.title" &&
- $2 == "s")
- {
- title = $3
- } else if ($1 == ".response.body.hits.hits[]._score") {
- # the _score field happens to be toward the end of each item
- if (length(title) > 0) {
- if (length(creator) > 0) {
- label = sprintf("[%s] %s by %s", mediatype[type],
- gph_encode(shorten(title, 40)), shorten(creator, 18))
- } else {
- label = sprintf("[%s] %s", mediatype[type],
- gph_encode(shorten(title, 58)))
- }
- printf "[1|%s|%s/details/%s|%s|%s]\n", label, cgipath, id,
- server, port >>output
- count++
- }
- creator = ""
- descr = ""
- id = ""
- type = ""
- }
- }
- close(cmd)
-
- print "" >>output
-
- # only show "page back" if the user is past page 1
- if (page > 1) {
- printf "[1|[<<] Page %d|%s/list/page%d/rows%d/%s%%09%s/%d|%s|%s]\n",
- page - 1, cgipath, page - 1, rows, sort_param,
- acct, list_id, server, port >>output
- }
-
- # only show "next page" if the current page is completely full
- if (count == rows) {
- printf "[1|[>>] Page %d|%s/list/page%d/rows%d/%s%%09%s/%d|%s|%s]\n",
- page + 1, cgipath, page + 1, rows, sort_param,
- acct, list_id, server, port >>output
- }
-
- # only show "sort" if there's more than one item to sort
- if (numfound > 1) {
- printf "[1|[^v] Sort|%s/listsort/%%09%s/%d|%s|%s]\n", cgipath,
- acct, list_id, server, port >>output
- }
-
- printf "[1|Account %s|%s/account/%s|%s|%s]\n", acct, cgipath,
- acct, server, port >>output
-
- print "" >>output
- printf "[1|PHAROS|%s|%s|%s]\n", cgipath, server, port >>output
-
- cache_end()
- unlink(iaout)
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- uri_encode_init()
- util_init()
- main()
-}
ADDED src/lists.m4
Index: src/lists.m4
==================================================================
--- /dev/null
+++ src/lists.m4
@@ -0,0 +1,113 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/lists
+#
+# Show a list of a user's lists
+
+include(src/config.awk)
+incl(src/api.awk)
+incl(src/cache.awk)
+incl(src/cgi.awk)
+incl(src/sqlite.awk)
+incl(src/util.awk)
+
+function main( cmd, count, fields, iaout, i, id, is_private, item,
+ item_count, item_id, label, name, output, record, records,
+ signature, str, url)
+{
+ signature = sprintf("%s/lists", search)
+ str = cache_init(signature)
+ if (length(str) > 0) {
+ print str
+ return
+ }
+
+ output = cache_begin()
+ iaout = gettemp()
+
+ url = api_ssl_endpoint "/services/users/" search "/lists"
+ api_request(url, "GET", iaout)
+
+ # format list as a gopher directory (menu)
+ cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
+ FS = "\t"
+ count = 0
+ delete fields[0]
+ id = 0
+ is_private = 0
+ item = ""
+ item_count = 0
+ item_id = ""
+ name = ""
+ record = ""
+ delete records[0]
+
+ while ((cmd | getline) > 0) {
+ if ($1 == ".value[]" && $2 == "o") {
+ # add information for previous list
+ if (!is_private && length(name) > 0 && item_count > 0) {
+ label = shorten_left(name, 50)
+ item = sprintf("1%4d Items: %-50s\t%s/list?%s/%d\t%s\t%s",
+ item_count, label, cgipath, search, id, server, port)
+ record = label "\v" id "\v" item
+ count++
+ records[count] = record
+ }
+ } else if ($1 == ".value[].list_name" && $2 == "s") {
+ name = $3
+ id = 0
+ is_private = 0
+ item_count = 0
+ } else if ($1 == ".value[].is_private" && $2 == "b") {
+ if ($3 == "true") {
+ is_private = 1
+ }
+ } else if ($1 == ".value[].id" && $2 == "n") {
+ id = $3
+ } else if ($1 == ".value[].members[].identifier" && $2 == "s") {
+ item_count++
+ }
+ }
+ close(cmd)
+
+ # add information for previous list
+ if (!is_private && length(name) > 0 && item_count > 0) {
+ label = shorten_left(name, 50)
+ item = sprintf("1%4d Items: %-50s\t%s/list?%s/%d\t%s\t%s",
+ item_count, label, cgipath, search, id, server, port)
+ record = label "\v" id "\v" item
+ count++
+ records[count] = record
+ }
+
+ # sort lists by label and id
+ if (count > 0) {
+ hsort(records, count)
+ }
+
+ print search "'s Lists" >>output
+ print "" >>output
+
+ for (i = 1; i <= count; i++) {
+ record = records[i]
+ split(record, fields, /\v/)
+ item = fields[3]
+ print item >>output
+ }
+
+ print "" >>output
+ printf "1PHAROS\t%s\t%s\t%s\n", docpath, server, port >>output
+
+ cache_end()
+ unlink(iaout)
+ return
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ uri_encode_init()
+ main()
+}
DELETED src/lists/index.dcgi.m4
Index: src/lists/index.dcgi.m4
==================================================================
--- src/lists/index.dcgi.m4
+++ /dev/null
@@ -1,111 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# lists/index.dcgi
-#
-# Show a list of a user's lists
-
-include(src/config.awk)
-incl(src/api.awk)
-incl(src/cache.awk)
-incl(src/cgi.awk)
-incl(src/sqlite.awk)
-incl(src/util.awk)
-
-function main( cmd, count, fields, iaout, i, id, is_private, item,
- item_count, item_id, label, name, output, record, records,
- signature, str, url)
-{
- signature = sprintf("%s/lists", search)
- str = cache_init(signature)
- if (length(str) > 0) {
- print str
- return
- }
-
- output = cache_begin()
- iaout = gettemp()
-
- url = api_ssl_endpoint "/services/users/" search "/lists"
- api_request(url, "GET", iaout)
-
- # format list as a gopher directory (menu)
- cmd = sprintf("%s <%s 2>&1", cmd_json2tsv, iaout)
- FS = "\t"
- count = 0
- delete fields[0]
- id = 0
- is_private = 0
- item = ""
- item_count = 0
- item_id = ""
- name = ""
- record = ""
- delete records[0]
-
- while ((cmd | getline) > 0) {
- if ($1 == ".value[]" && $2 == "o") {
- # add information for previous list
- if (!is_private && length(name) > 0 && item_count > 0) {
- label = shorten_left(name, 50)
- item = sprintf("[1|%4d Items: %-50s|%s/list/%%09%s/%d|%s|%s]",
- item_count, label, cgipath, search, id, server, port)
- record = label "\t" id "\t" item
- count++
- records[count] = record
- }
- } else if ($1 == ".value[].list_name" && $2 == "s") {
- name = $3
- id = 0
- is_private = 0
- item_count = 0
- } else if ($1 == ".value[].is_private" && $2 == "b") {
- if ($3 == "true") {
- is_private = 1
- }
- } else if ($1 == ".value[].id" && $2 == "n") {
- id = $3
- } else if ($1 == ".value[].members[].identifier" && $2 == "s") {
- item_count++
- }
- }
- close(cmd)
-
- # add information for previous list
- if (!is_private && length(name) > 0 && item_count > 0) {
- label = shorten_left(name, 50)
- item = sprintf("[1|%4d Items: %-50s|%s/list/%%09%s/%d|%s|%s]",
- item_count, label, cgipath, search, id, server, port)
- record = label "\t" id "\t" item
- count++
- records[count] = record
- }
-
- # sort lists by label and id
- hsort(records, count)
-
- print search "'s Lists" >>output
- print "" >>output
-
- for (i = 1; i <= count; i++) {
- record = records[i]
- split(record, fields, /\t/)
- item = fields[3]
- print item >>output
- }
-
- print "" >>output
- printf "[1|PHAROS|%s|%s|%s]\n", cgipath, server, port >>output
-
- cache_end()
- unlink(iaout)
- return
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- uri_encode_init()
- main()
-}
ADDED src/listsort.m4
Index: src/listsort.m4
==================================================================
--- /dev/null
+++ src/listsort.m4
@@ -0,0 +1,54 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/listsort
+#
+# Change list sort order
+
+include(src/config.awk)
+incl(src/cgi.awk)
+
+function main( acct, i, lbl, list_id, opt) {
+ lbl[1] = "Relevance"
+ opt[1] = ""
+ lbl[2] = "Weekly views [^]"
+ opt[2] = "week asc"
+ lbl[3] = "Weekly views [v]"
+ opt[3] = "week desc"
+ lbl[4] = "Title [^]"
+ opt[4] = "title asc"
+ lbl[5] = "Title [v]"
+ opt[5] = "title desc"
+ lbl[6] = "Date published [^]"
+ opt[6] = "date asc"
+ lbl[7] = "Date published [v]"
+ opt[7] = "date desc"
+ lbl[8] = "Creator [^]"
+ opt[8] = "creator asc"
+ lbl[9] = "Creator [v]"
+ opt[9] = "creator desc"
+
+ split(search, parts, "/")
+ acct = parts[1]
+ list_id = parts[2]
+
+ print "# Sort by"
+ print ""
+ for (i = 1; i < 10; i++) {
+ if (length(opt[i]) == 0) {
+ printf "1%s\t%s/list?%s/%d\t%s\t%s\n", lbl[i], cgipath,
+ acct, list_id, server, port
+ } else {
+ printf "1%s\t%s/list?%s/%d/sort%s\t%s\t%s\n", lbl[i],
+ cgipath, acct, list_id, opt[i], server, port
+ }
+ }
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ main()
+}
DELETED src/listsort/index.dcgi.m4
Index: src/listsort/index.dcgi.m4
==================================================================
--- src/listsort/index.dcgi.m4
+++ /dev/null
@@ -1,54 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# listsort/index.dcgi
-#
-# Change list sort order
-
-include(src/config.awk)
-incl(src/cgi.awk)
-
-function main( acct, i, lbl, list_id, opt) {
- lbl[1] = "Relevance"
- opt[1] = ""
- lbl[2] = "Weekly views [^]"
- opt[2] = "week asc"
- lbl[3] = "Weekly views [v]"
- opt[3] = "week desc"
- lbl[4] = "Title [^]"
- opt[4] = "title asc"
- lbl[5] = "Title [v]"
- opt[5] = "title desc"
- lbl[6] = "Date published [^]"
- opt[6] = "date asc"
- lbl[7] = "Date published [v]"
- opt[7] = "date desc"
- lbl[8] = "Creator [^]"
- opt[8] = "creator asc"
- lbl[9] = "Creator [v]"
- opt[9] = "creator desc"
-
- split(search, parts, "/")
- acct = parts[1]
- list_id = parts[2]
-
- print "# Sort by"
- print ""
- for (i = 1; i < 10; i++) {
- if (length(opt[i]) == 0) {
- printf "[1|%s|%s/list/%%09%s/%d|%s|%s]\n",
- lbl[i], cgipath, acct, list_id, server, port
- } else {
- printf "[1|%s|%s/list/sort%s%%09%s/%d|%s|%s]\n",
- lbl[i], cgipath, opt[i], acct, list_id, server, port
- }
- }
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- main()
-}
ADDED src/raw.m4
Index: src/raw.m4
==================================================================
--- /dev/null
+++ src/raw.m4
@@ -0,0 +1,46 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/debug
+#
+# Show HTTP headers from curl fetching a URL
+#
+# __CGIPATH__/links
+#
+# Show download links, plus links scraped from HTML document
+#
+# __CGIPATH__/raw
+#
+# Show raw bytes from binary document
+#
+# __CGIPATH__/text
+#
+# Show text content scraped from HTML document
+
+include(src/config.awk)
+incl(src/api.awk)
+incl(src/cgi.awk)
+incl(src/util.awk)
+incl(src/web.awk)
+
+function main() {
+ if (path == "/debug") {
+ dump(search, TYPE_HEADERS)
+ } else if (path == "/raw") {
+ dump(search, TYPE_RAW)
+ } else if (path == "/text") {
+ dump(search, TYPE_TEXT)
+ } else if (path == "/links") {
+ dump(search, TYPE_LINKS)
+ }
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ uri_encode_init()
+ web_init()
+ main()
+}
DELETED src/raw/index.cgi.m4
Index: src/raw/index.cgi.m4
==================================================================
--- src/raw/index.cgi.m4
+++ /dev/null
@@ -1,46 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# debug/index.cgi
-#
-# Show HTTP headers from curl fetching a URL
-#
-# links/index.dcgi
-#
-# Show download links, plus links scraped from HTML document
-#
-# raw/index.cgi
-#
-# Show raw bytes from binary document
-#
-# text/index.cgi
-#
-# Show text content scraped from HTML document
-
-include(src/config.awk)
-incl(src/api.awk)
-incl(src/cgi.awk)
-incl(src/util.awk)
-incl(src/web.awk)
-
-function main() {
- if (path == "/debug/") {
- dump(search, TYPE_HEADERS)
- } else if (path == "/raw/") {
- dump(search, TYPE_RAW)
- } else if (path == "/text/") {
- dump(search, TYPE_TEXT)
- } else if (path == "/links/") {
- dump(search, TYPE_LINKS)
- }
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- uri_encode_init()
- web_init()
- main()
-}
ADDED src/search.m4
Index: src/search.m4
==================================================================
--- /dev/null
+++ src/search.m4
@@ -0,0 +1,261 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/search
+#
+# Show search results
+
+include(src/config.awk)
+incl(src/api.awk)
+incl(src/cgi.awk)
+incl(src/util.awk)
+
+function main(search, cmd, count, creator, descr, field, fields, i,
+ iaout, id, item, items, jsout, label, numfound, order,
+ order_names, page, prefix, rows, searchstr, sort_param, str,
+ title, type, url)
+{
+ order_names["addeddate"] = "addeddate"
+ order_names["collection_size"] = "collection_size"
+ order_names["createddate"] = "createddate"
+ order_names["creator"] = "creatorSorter"
+ order_names["date"] = "date"
+ order_names["downloads"] = "downloads"
+ order_names["nav_order"] = "nav_order"
+ order_names["random"] = "random"
+ order_names["title"] = "titleSorter"
+ order_names["week"] = "week"
+
+ rows = 15
+ page = 1
+ order = ""
+
+ # parse out page number and sort orders
+ numfound = 1
+ while (numfound == 1) {
+ if (match(search, /\/rows[0-9][0-9]*$/)) {
+ rows = substr(search, RSTART + 5)
+ search = substr(search, 1, RSTART - 1)
+ } else if (match(search, /\/page[0-9][0-9]*$/)) {
+ page = substr(search, RSTART + 5)
+ search = substr(search, 1, RSTART - 1)
+ } else if (match(search, /\/sort[^\/]*$/)) {
+ if (length(order) == 0) {
+ sort_param = substr(search, RSTART + 1)
+ order = substr(sort_param, 5)
+ }
+ search = substr(search, 1, RSTART - 1)
+ } else {
+ numfound = 0
+ }
+ }
+
+ iaout = gettemp()
+ jsout = gettemp()
+
+ # special case for when the search term is an archive.org details URL
+ if (match(search, /^https:\/\/(www\.)?archive\.org\/details\//)) {
+ id = substr(search, RLENGTH + 1)
+ search = "identifier:" id
+ }
+
+ # special case to list similar items
+ if (match(search, /^similar:/)) {
+ id = substr(search, RLENGTH + 1)
+
+ url = api_endpoint "/details/" id "?RelatedItemsGet=1"
+ api_request(url, "GET", iaout)
+
+ # scrape similar items
+ FS = "\t"
+ items = 0
+ delete item
+ str = ""
+ while ((getline <iaout) > 0) {
+ if (match($0, /<a href="\/details\/[^"]*"/)) {
+ str = substr($0, RSTART + 18, RLENGTH - 19)
+ items++
+ item[items] = str
+ }
+ }
+ close(iaout)
+
+ # truncate temporary file for re-use
+ printf "" >iaout
+ close(iaout)
+
+ for (i = 1; i <= items; i++) {
+ if (i == 1) {
+ search = "identifier:" item[i]
+ } else {
+ search = search "%20OR%20identifier:" item[i]
+ }
+ }
+ }
+
+ # remove quotes from search string, since it gets quoted later
+ gsub(/"/, "", search)
+
+ # default sort orders if none were specified
+ if (length(order) == 0) {
+ if (search == "mediatype:collection" ||
+ search == "mediatype:(collection)")
+ {
+ order = "collection_size desc"
+ sort_param = "sort" order
+ } else {
+ order = "nav_order desc"
+ sort_param = "sort" order
+ }
+ }
+
+ # get search results
+
+ field[1] = "creator"
+ field[2] = "description"
+ field[3] = "identifier"
+ field[4] = "mediatype"
+ field[5] = "title"
+ fields = 5
+
+ # remove anyfield, a hobgoblin of consistency
+ searchstr = search
+ gsub(/anyfield:/, "", searchstr)
+
+ url = sprintf("%s/advancedsearch.php?q=%s&output=json&rows=%d&page=%d",
+ api_endpoint, searchstr, rows, page)
+ if (length(order) > 0) {
+ split(order, parts, " ")
+ url = url sprintf("&sort%%5B0%%5D=%s %s", order_names[parts[1]],
+ parts[2])
+ }
+ for (i = 1; i <= fields; i++) {
+ url = url sprintf("&fl%%5B%d%%5D=%s", i - 1, field[i])
+ }
+ api_request(url, "GET", iaout)
+
+ cmd = sprintf("%s <%s >%s 2>&1", cmd_json2tsv, iaout, jsout)
+ system(cmd)
+
+ numfound = 0
+ FS = "\t"
+ while ((getline <jsout) > 0) {
+ if ($1 == ".response.numFound" && $2 == "n") {
+ numfound = $3
+ }
+ }
+ close(jsout)
+
+ if (search ~ /^@/) {
+ numfound++
+ }
+ if (numfound == 0) {
+ print "Your search did not match any items in the Archive."
+ print "Try different keywords or a more general search."
+ print ""
+ printf "1PHAROS\t%s\t%s\t%s\n", docpath, server, port
+ unlink(jsout)
+ unlink(iaout)
+ return
+ } else {
+ pages = int(numfound / rows)
+ if (numfound % rows != 0) {
+ pages++
+ }
+ printf "# %s search results, page %d of %d\n", numfound,
+ page, pages
+ print ""
+ }
+
+ # format search results as a gopher directory (menu)
+ FS = "\t"
+ creator = ""
+ descr = ""
+ id = ""
+ title = ""
+ type = ""
+ count = 0
+
+ if (search ~ /^@/) {
+ printf "1Account %s\t%s/account?%s\t%s\t%s\n", search, cgipath,
+ search, server, port
+ }
+
+ while ((getline <jsout) > 0) {
+ if ($1 == ".response.docs[].creator" && $2 == "s") {
+ creator = $3
+ } else if ($1 == ".response.docs[].description" && $2 == "s") {
+ descr = $3
+ } else if ($1 == ".response.docs[].identifier" && $2 == "s") {
+ id = $3
+ } else if ($1 == ".response.docs[].mediatype" && $2 == "s") {
+ type = $3
+ } else if ($1 == ".response.docs[].title" && $2 == "s") {
+ # the title field happens to be toward the end of each item
+ title = $3
+ count++
+ if (length(creator) > 0) {
+ label = sprintf("[%s] %s by %s", mediatype[type],
+ shorten(title, 40), shorten(creator, 18))
+ } else {
+ label = sprintf("[%s] %s", mediatype[type],
+ shorten(title, 58))
+ }
+ if (type == "collection") {
+ printf "1%s\t%s/search?collection:(%s)\t%s\t%s\n",
+ label, cgipath, id, server, port
+ } else {
+ printf "1%s\t%s/details?%s\t%s\t%s\n", label, cgipath,
+ id, server, port
+ }
+ creator = ""
+ descr = ""
+ id = ""
+ type = ""
+ }
+ }
+ close(jsout)
+
+ print ""
+
+ # only show "page back" if the user is past page 1
+ if (page > 1) {
+ prefix = sprintf("/page%d/rows%d/%s", page - 1, rows, sort_param)
+ printf "1[<<] Page %d\t%s/search?%s%s\t%s\t%s\n",
+ page - 1, cgipath, search, prefix, server, port
+ }
+
+ # only show "next page" if the current page is completely full
+ if (count == rows) {
+ prefix = sprintf("/page%d/rows%d/%s", page + 1, rows, sort_param)
+ printf "1[>>] Page %d\t%s/search?%s%s\t%s\t%s\n",
+ page + 1, cgipath, search, prefix, server, port
+ }
+
+ # only show "sort" if there's more than one item to sort
+ if (count > 1) {
+ printf "1[^v] Sort\t%s/sort?%s\t%s\t%s\n", cgipath,
+ search, server, port
+ }
+
+ # only show "search within list" if there's multiple pages of results
+ if (numfound > rows) {
+ printf "1[\\/] Filter results\t%s/wizard/step1?%s\t%s\t%s\n",
+ cgipath, search, server, port
+ }
+
+ printf "1PHAROS\t%s\t%s\t%s\n", docpath, server, port
+
+ unlink(jsout)
+ unlink(iaout)
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ uri_encode_init()
+ util_init()
+ main(search)
+}
DELETED src/search/index.dcgi.m4
Index: src/search/index.dcgi.m4
==================================================================
--- src/search/index.dcgi.m4
+++ /dev/null
@@ -1,255 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# search/index.dcgi
-#
-# Show search results
-
-include(src/config.awk)
-incl(src/api.awk)
-incl(src/cgi.awk)
-incl(src/util.awk)
-
-function main(search, cmd, count, creator, descr, field, fields, i,
- iaout, id, item, items, jsout, label, numfound, order,
- order_names, page, rows, searchstr, sort_param, str, title, type,
- url)
-{
- order_names["addeddate"] = "addeddate"
- order_names["collection_size"] = "collection_size"
- order_names["createddate"] = "createddate"
- order_names["creator"] = "creatorSorter"
- order_names["date"] = "date"
- order_names["downloads"] = "downloads"
- order_names["nav_order"] = "nav_order"
- order_names["random"] = "random"
- order_names["title"] = "titleSorter"
- order_names["week"] = "week"
-
- rows = 15
- page = 1
- order = ""
-
- # parse out page number and sort orders
- for (i in parts) {
- if (parts[i] ~ /^rows[0-9][0-9]*$/) {
- rows = substr(parts[i], 5)
- } else if (parts[i] ~ /^page[0-9][0-9]*$/) {
- page = substr(parts[i], 5)
- } else if (parts[i] ~ /^sort/) {
- if (length(order) == 0) {
- sort_param = parts[i]
- order = substr(parts[i], 5)
- }
- }
- }
-
- iaout = gettemp()
- jsout = gettemp()
-
- # special case for when the search term is an archive.org details URL
- if (match(search, /^https:\/\/(www\.)?archive\.org\/details\//)) {
- id = substr(search, RLENGTH + 1)
- search = "identifier:" id
- }
-
- # special case to list similar items
- if (match(search, /^similar:/)) {
- id = substr(search, RLENGTH + 1)
-
- url = api_endpoint "/details/" id "?RelatedItemsGet=1"
- api_request(url, "GET", iaout)
-
- # scrape similar items
- FS = "\t"
- items = 0
- delete item
- str = ""
- while ((getline <iaout) > 0) {
- if (match($0, /<a href="\/details\/[^"]*"/)) {
- str = substr($0, RSTART + 18, RLENGTH - 19)
- items++
- item[items] = str
- }
- }
- close(iaout)
-
- # truncate temporary file for re-use
- printf "" >iaout
- close(iaout)
-
- for (i = 1; i <= items; i++) {
- if (i == 1) {
- search = "identifier:" item[i]
- } else {
- search = search "%20OR%20identifier:" item[i]
- }
- }
- }
-
- # remove quotes from search string, since it gets quoted later
- gsub(/"/, "", search)
-
- # default sort orders if none were specified
- if (length(order) == 0) {
- if (search == "mediatype:collection" ||
- search == "mediatype:(collection)")
- {
- order = "collection_size desc"
- sort_param = "sort" order
- } else {
- order = "nav_order desc"
- sort_param = "sort" order
- }
- }
-
- # get search results
-
- field[1] = "creator"
- field[2] = "description"
- field[3] = "identifier"
- field[4] = "mediatype"
- field[5] = "title"
- fields = 5
-
- # remove anyfield, a hobgoblin of consistency
- searchstr = search
- gsub(/anyfield:/, "", searchstr)
-
- url = sprintf("%s/advancedsearch.php?q=%s&output=json&rows=%d&page=%d",
- api_endpoint, searchstr, rows, page)
- if (length(order) > 0) {
- split(order, parts, " ")
- url = url sprintf("&sort%%5B0%%5D=%s %s", order_names[parts[1]],
- parts[2])
- }
- for (i = 1; i <= fields; i++) {
- url = url sprintf("&fl%%5B%d%%5D=%s", i - 1, field[i])
- }
- api_request(url, "GET", iaout)
-
- cmd = sprintf("%s <%s >%s 2>&1", cmd_json2tsv, iaout, jsout)
- system(cmd)
-
- numfound = 0
- FS = "\t"
- while ((getline <jsout) > 0) {
- if ($1 == ".response.numFound" && $2 == "n") {
- numfound = $3
- }
- }
- close(jsout)
-
- if (search ~ /^@/) {
- numfound++
- }
- if (numfound == 0) {
- print "Your search did not match any items in the Archive."
- print "Try different keywords or a more general search."
- print ""
- printf "[1|PHAROS|%s|%s|%s]\n", cgipath, server, port
- unlink(jsout)
- unlink(iaout)
- return
- } else {
- pages = int(numfound / rows)
- if (numfound % rows != 0) {
- pages++
- }
- printf "# %s search results, page %d of %d\n", numfound,
- page, pages
- print ""
- }
-
- # format search results as a gopher directory (menu)
- FS = "\t"
- creator = ""
- descr = ""
- id = ""
- title = ""
- type = ""
- count = 0
-
- if (search ~ /^@/) {
- printf "[1|Account %s|%s/account/%s|%s|%s]\n", search, cgipath,
- search, server, port
- }
-
- while ((getline <jsout) > 0) {
- if ($1 == ".response.docs[].creator" && $2 == "s") {
- creator = $3
- } else if ($1 == ".response.docs[].description" && $2 == "s") {
- descr = $3
- } else if ($1 == ".response.docs[].identifier" && $2 == "s") {
- id = $3
- } else if ($1 == ".response.docs[].mediatype" && $2 == "s") {
- type = $3
- } else if ($1 == ".response.docs[].title" && $2 == "s") {
- # the title field happens to be toward the end of each item
- title = $3
- count++
- if (length(creator) > 0) {
- label = sprintf("[%s] %s by %s", mediatype[type],
- gph_encode(shorten(title, 40)), shorten(creator, 18))
- } else {
- label = sprintf("[%s] %s", mediatype[type],
- gph_encode(shorten(title, 58)))
- }
- if (type == "collection") {
- printf "[1|%s|%s/search/%%09collection:(%s)|%s|%s]\n",
- label, cgipath, id, server, port
- } else {
- printf "[1|%s|%s/details/%s|%s|%s]\n", label, cgipath,
- id, server, port
- }
- creator = ""
- descr = ""
- id = ""
- type = ""
- }
- }
- close(jsout)
-
- print ""
-
- # only show "page back" if the user is past page 1
- if (page > 1) {
- printf "[1|[<<] Page %d|%s/search/page%d/rows%d/%s%%09%s|%s|%s]\n",
- page - 1, cgipath, page - 1, rows, sort_param, search,
- server, port
- }
-
- # only show "next page" if the current page is completely full
- if (count == rows) {
- printf "[1|[>>] Page %d|%s/search/page%d/rows%d/%s%%09%s|%s|%s]\n",
- page + 1, cgipath, page + 1, rows, sort_param, search,
- server, port
- }
-
- # only show "sort" if there's more than one item to sort
- if (count > 1) {
- printf "[1|[^v] Sort|%s/sort/%%09%s|%s|%s]\n", cgipath, search,
- server, port
- }
-
- # only show "search within list" if there's multiple pages of results
- if (numfound > rows) {
- printf "[1|[\\/] Filter results|%s/wizard/step1/%s|%s|%s]\n",
- cgipath, search, server, port
- }
-
- printf "[1|PHAROS|%s|%s|%s]\n", cgipath, server, port
-
- unlink(jsout)
- unlink(iaout)
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- uri_encode_init()
- util_init()
- main(search)
-}
ADDED src/software/gophermap.m4
Index: src/software/gophermap.m4
==================================================================
--- /dev/null
+++ src/software/gophermap.m4
@@ -0,0 +1,28 @@
+include(config.m4)dnl
+
+[1|All Software|__CGIPATH__/search/sortweek desc/%09mediatype:software|__SERVER__|__PORT__]
+[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:software|__SERVER__|__PORT__]
+[1|Internet Arcade|__CGIPATH__/search/sortweek desc/%09collection:internetarcade|__SERVER__|__PORT__]
+[1|Console Living Room|__CGIPATH__/search/sortweek desc/%09collection:consolelivingroom|__SERVER__|__PORT__]
+[1|Old School Emulation|__CGIPATH__/search/sortweek desc/%09collection:tosec|__SERVER__|__PORT__]
+[1|MS-DOS Games|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary_msdos_games|__SERVER__|__PORT__]
+[1|Historical Software|__CGIPATH__/search/sortweek desc/%09collection:historicalsoftware|__SERVER__|__PORT__]
+[1|Classic PC Games|__CGIPATH__/search/sortweek desc/%09collection:classicpcgames|__SERVER__|__PORT__]
+[1|Software Library|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary AND mediatype:collection|__SERVER__|__PORT__]
+[1|Kodi Archive & Support Files|__CGIPATH__/search/sortweek desc/%09collection:kodi_archive|__SERVER__|__PORT__]
+[1|Vintage Software|__CGIPATH__/search/sortweek desc/%09collection:vintagesoftware|__SERVER__|__PORT__]
+[1|APK|__CGIPATH__/search/sortweek desc/%09collection:apkarchive|__SERVER__|__PORT__]
+[1|MS-DOS|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary_msdos|__SERVER__|__PORT__]
+[1|CD-ROM Software|__CGIPATH__/search/sortweek desc/%09collection:cd-roms|__SERVER__|__PORT__]
+[1|CD-ROM Software Library|__CGIPATH__/search/sortweek desc/%09collection:cdromsoftware|__SERVER__|__PORT__]
+[1|Software Sites|__CGIPATH__/search/sortweek desc/%09collection:softwaresites|__SERVER__|__PORT__]
+[1|Tucows Software Library|__CGIPATH__/search/sortweek desc/%09collection:tucows|__SERVER__|__PORT__]
+[1|Shareware CD-ROMs|__CGIPATH__/search/sortweek desc/%09collection:cdbbsarchive|__SERVER__|__PORT__]
+[1|Software Capsules Compilation|__CGIPATH__/search/sortweek desc/%09collection:softwarecapsules|__SERVER__|__PORT__]
+[1|CD-ROM Images|__CGIPATH__/search/sortweek desc/%09collection:cdromimages|__SERVER__|__PORT__]
+[1|Underground CD-ROM Compilations Library|__CGIPATH__/search/sortweek desc/%09collection:undergroundcds|__SERVER__|__PORT__]
+[1|ZX Spectrum|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary_zx_spectrum|__SERVER__|__PORT__]
+[1|DOOM Level CD|__CGIPATH__/search/sortweek desc/%09collection:doom-cds|__SERVER__|__PORT__]
+[1|Floppy Disks of Software|__CGIPATH__/search/sortweek desc/%09collection:floppysoftware|__SERVER__|__PORT__]
+[1|The Good Old Days IBM PC Floppy Collection|__CGIPATH__/search/sortweek desc/%09collection:TGODFloppyCollection|__SERVER__|__PORT__]
+[1|MS-DOS: The Frostbyte Shareware Collection|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary_msdos_frostbyte|__SERVER__|__PORT__]
DELETED src/software/index.gph.m4
Index: src/software/index.gph.m4
==================================================================
--- src/software/index.gph.m4
+++ /dev/null
@@ -1,28 +0,0 @@
-include(config.m4)dnl
-
-[1|All Software|__CGIPATH__/search/sortweek desc/%09mediatype:software|__SERVER__|__PORT__]
-[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:software|__SERVER__|__PORT__]
-[1|Internet Arcade|__CGIPATH__/search/sortweek desc/%09collection:internetarcade|__SERVER__|__PORT__]
-[1|Console Living Room|__CGIPATH__/search/sortweek desc/%09collection:consolelivingroom|__SERVER__|__PORT__]
-[1|Old School Emulation|__CGIPATH__/search/sortweek desc/%09collection:tosec|__SERVER__|__PORT__]
-[1|MS-DOS Games|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary_msdos_games|__SERVER__|__PORT__]
-[1|Historical Software|__CGIPATH__/search/sortweek desc/%09collection:historicalsoftware|__SERVER__|__PORT__]
-[1|Classic PC Games|__CGIPATH__/search/sortweek desc/%09collection:classicpcgames|__SERVER__|__PORT__]
-[1|Software Library|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary AND mediatype:collection|__SERVER__|__PORT__]
-[1|Kodi Archive & Support Files|__CGIPATH__/search/sortweek desc/%09collection:kodi_archive|__SERVER__|__PORT__]
-[1|Vintage Software|__CGIPATH__/search/sortweek desc/%09collection:vintagesoftware|__SERVER__|__PORT__]
-[1|APK|__CGIPATH__/search/sortweek desc/%09collection:apkarchive|__SERVER__|__PORT__]
-[1|MS-DOS|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary_msdos|__SERVER__|__PORT__]
-[1|CD-ROM Software|__CGIPATH__/search/sortweek desc/%09collection:cd-roms|__SERVER__|__PORT__]
-[1|CD-ROM Software Library|__CGIPATH__/search/sortweek desc/%09collection:cdromsoftware|__SERVER__|__PORT__]
-[1|Software Sites|__CGIPATH__/search/sortweek desc/%09collection:softwaresites|__SERVER__|__PORT__]
-[1|Tucows Software Library|__CGIPATH__/search/sortweek desc/%09collection:tucows|__SERVER__|__PORT__]
-[1|Shareware CD-ROMs|__CGIPATH__/search/sortweek desc/%09collection:cdbbsarchive|__SERVER__|__PORT__]
-[1|Software Capsules Compilation|__CGIPATH__/search/sortweek desc/%09collection:softwarecapsules|__SERVER__|__PORT__]
-[1|CD-ROM Images|__CGIPATH__/search/sortweek desc/%09collection:cdromimages|__SERVER__|__PORT__]
-[1|Underground CD-ROM CompilationsLibrary|__CGIPATH__/search/sortweek desc/%09collection:undergroundcds|__SERVER__|__PORT__]
-[1|ZX Spectrum|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary_zx_spectrum|__SERVER__|__PORT__]
-[1|DOOM Level CD|__CGIPATH__/search/sortweek desc/%09collection:doom-cds|__SERVER__|__PORT__]
-[1|Floppy Disks of Software|__CGIPATH__/search/sortweek desc/%09collection:floppysoftware|__SERVER__|__PORT__]
-[1|The Good Old Days IBM PC Floppy Collection|__CGIPATH__/search/sortweek desc/%09collection:TGODFloppyCollection|__SERVER__|__PORT__]
-[1|MS-DOS: The Frostbyte Shareware Collection|__CGIPATH__/search/sortweek desc/%09collection:softwarelibrary_msdos_frostbyte|__SERVER__|__PORT__]
ADDED src/sort.m4
Index: src/sort.m4
==================================================================
--- /dev/null
+++ src/sort.m4
@@ -0,0 +1,61 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/sort
+#
+# Change search sort order
+
+include(src/config.awk)
+incl(src/cgi.awk)
+
+function main( i, lbl, opt) {
+ lbl[1] = "Default [^]"
+ opt[1] = "nav_order asc"
+ lbl[2] = "Default [v]"
+ opt[2] = "nav_order desc"
+ lbl[3] = "Weekly views [^]"
+ opt[3] = "week asc"
+ lbl[4] = "Weekly views [v]"
+ opt[4] = "week desc"
+ lbl[5] = "All-time views [^]"
+ opt[5] = "downloads asc"
+ lbl[6] = "All-time views [v]"
+ opt[6] = "downloads desc"
+ lbl[7] = "Title [^]"
+ opt[7] = "title asc"
+ lbl[8] = "Title [v]"
+ opt[8] = "title desc"
+ lbl[9] = "Date added [^]"
+ opt[9] = "addeddate asc"
+ lbl[10] = "Date added [v]"
+ opt[10] = "addeddate desc"
+ lbl[11] = "Date created [^]"
+ opt[11] = "createddate asc"
+ lbl[12] = "Date created [v]"
+ opt[12] = "createddate desc"
+ lbl[13] = "Date published [^]"
+ opt[13] = "date asc"
+ lbl[14] = "Date published [v]"
+ opt[14] = "date desc"
+ lbl[15] = "Creator [^]"
+ opt[15] = "creator asc"
+ lbl[16] = "Creator [v]"
+ opt[16] = "creator desc"
+ lbl[17] = "Random"
+ opt[17] = "random asc"
+
+ print "# Sort by"
+ print ""
+ for (i = 1; i < 18; i++) {
+ printf "1%s\t%s/search?%s/sort%s\t%s\t%s\n",
+ lbl[i], cgipath, search, opt[i], server, port
+ }
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ main()
+}
DELETED src/sort/index.dcgi.m4
Index: src/sort/index.dcgi.m4
==================================================================
--- src/sort/index.dcgi.m4
+++ /dev/null
@@ -1,61 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# sort/index.dcgi
-#
-# Change search sort order
-
-include(src/config.awk)
-incl(src/cgi.awk)
-
-function main( i, lbl, opt) {
- lbl[1] = "Default [^]"
- opt[1] = "nav_order asc"
- lbl[2] = "Default [v]"
- opt[2] = "nav_order desc"
- lbl[3] = "Weekly views [^]"
- opt[3] = "week asc"
- lbl[4] = "Weekly views [v]"
- opt[4] = "week desc"
- lbl[5] = "All-time views [^]"
- opt[5] = "downloads asc"
- lbl[6] = "All-time views [v]"
- opt[6] = "downloads desc"
- lbl[7] = "Title [^]"
- opt[7] = "title asc"
- lbl[8] = "Title [v]"
- opt[8] = "title desc"
- lbl[9] = "Date added [^]"
- opt[9] = "addeddate asc"
- lbl[10] = "Date added [v]"
- opt[10] = "addeddate desc"
- lbl[11] = "Date created [^]"
- opt[11] = "createddate asc"
- lbl[12] = "Date created [v]"
- opt[12] = "createddate desc"
- lbl[13] = "Date published [^]"
- opt[13] = "date asc"
- lbl[14] = "Date published [v]"
- opt[14] = "date desc"
- lbl[15] = "Creator [^]"
- opt[15] = "creator asc"
- lbl[16] = "Creator [v]"
- opt[16] = "creator desc"
- lbl[17] = "Random"
- opt[17] = "random asc"
-
- print "# Sort by"
- print ""
- for (i = 1; i < 18; i++) {
- printf "[1|%s|%s/search/sort%s%%09%s|%s|%s]\n",
- lbl[i], cgipath, opt[i], search, server, port
- }
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- main()
-}
Index: src/util.awk
==================================================================
--- src/util.awk
+++ src/util.awk
@@ -117,17 +117,17 @@
print "" >>output
print "Items may be taken down for various reasons," >>output
print "including by decision of the uploader or" >>output
print "due to a violation of the Terms of Use." >>output
print "" >>output
- printf "[h|Metadata|URL:%s|%s|%s]\n", url, server, port >>output
+ printf "hMetadata\tURL:%s\t%s\t%s\n", url, server, port >>output
print "" >>output
url = api_ssl_endpoint "/about/terms.php"
- printf "[0|Terms of Use|%s/text/%%09%s|%s|%s]\n", cgipath,
+ printf "0Terms of Use\t%s/text?%s\t%s\t%s\n", cgipath,
url, server, port >>output
print "" >>output
- printf "[1|PHAROS|%s|%s|%s]\n", cgipath, server, port >>output
+ printf "1PHAROS\t%s\t%s\t%s\n", docpath, server, port >>output
return
}
function read_file(name, retval) {
while ((getline <name) > 0) {
ADDED src/video/gophermap.m4
Index: src/video/gophermap.m4
==================================================================
--- /dev/null
+++ src/video/gophermap.m4
@@ -0,0 +1,23 @@
+include(config.m4)dnl
+# Video
+
+1All Video	__CGIPATH__/search?mediatype:movies/sortweek desc	__SERVER__	__PORT__
+1This Just In	__CGIPATH__/search?mediatype:movies/sortaddeddate desc	__SERVER__	__PORT__
+1Prelinger Archives	__CGIPATH__/search?collection:prelinger/sortweek desc	__SERVER__	__PORT__
+1Democracy Now!	__CGIPATH__/search?collection:democracy_now_vid/sortweek desc	__SERVER__	__PORT__
+1Occupy Wall Street	__CGIPATH__/search?collection:occupywallstreet/sortweek desc	__SERVER__	__PORT__
+1TV NSA Clip Library	__CGIPATH__/search?collection:nsa/sortweek desc	__SERVER__	__PORT__
+1Animation And Cartoons	__CGIPATH__/search?collection:animationandcartoons/sortweek desc	__SERVER__	__PORT__
+1Arts & Music	__CGIPATH__/search?collection:artsandmusicvideos/sortweek desc	__SERVER__	__PORT__
+1Computers & Technology	__CGIPATH__/search?collection:computersandtechvideos/sortweek desc	__SERVER__	__PORT__
+1Cultural & Academic Films	__CGIPATH__/search?collection:culturalandacademicfilms/sortweek desc	__SERVER__	__PORT__
+1Ephemeral Films	__CGIPATH__/search?collection:ephemera/sortweek desc	__SERVER__	__PORT__
+1Movies	__CGIPATH__/search?collection:moviesandfilms/sortweek desc	__SERVER__	__PORT__
+1News & Public Affairs	__CGIPATH__/search?collection:newsandpublicaffairs/sortweek desc	__SERVER__	__PORT__
+1Spirituality & Religion	__CGIPATH__/search?collection:spiritualityandreligion/sortweek desc	__SERVER__	__PORT__
+1Sports	__CGIPATH__/search?collection:sports/sortweek desc	__SERVER__	__PORT__
+1Television	__CGIPATH__/search?collection:television AND mediatype:collection/sortweek desc	__SERVER__	__PORT__
+1VHS Vault	__CGIPATH__/search?collection:vhsvault/sortweek desc	__SERVER__	__PORT__
+1Video Games	__CGIPATH__/search?collection:gamevideos AND mediatype:collection/sortweek desc	__SERVER__	__PORT__
+1Vlogs	__CGIPATH__/search?collection:vlogs/sortweek desc	__SERVER__	__PORT__
+1Youth Media	__CGIPATH__/search?collection:youth_media/sortweek desc	__SERVER__	__PORT__
DELETED src/video/index.gph.m4
Index: src/video/index.gph.m4
==================================================================
--- src/video/index.gph.m4
+++ /dev/null
@@ -1,23 +0,0 @@
-include(config.m4)dnl
-# Video
-
-[1|All Video|__CGIPATH__/search/sortweek desc/%09mediatype:movies|__SERVER__|__PORT__]
-[1|This Just In|__CGIPATH__/search/sortaddeddate desc/%09mediatype:movies|__SERVER__|__PORT__]
-[1|Prelinger Archives|__CGIPATH__/search/sortweek desc/%09collection:prelinger|__SERVER__|__PORT__]
-[1|Democracy Now!|__CGIPATH__/search/sortweek desc/%09collection:democracy_now_vid|__SERVER__|__PORT__]
-[1|Occupy Wall Street|__CGIPATH__/search/sortweek desc/%09collection:occupywallstreet|__SERVER__|__PORT__]
-[1|TV NSA Clip Library|__CGIPATH__/search/sortweek desc/%09collection:nsa|__SERVER__|__PORT__]
-[1|Animation And Cartoons|__CGIPATH__/search/sortweek desc/%09collection:animationandcartoons|__SERVER__|__PORT__]
-[1|Arts & Music|__CGIPATH__/search/sortweek desc/%09collection:artsandmusicvideos|__SERVER__|__PORT__]
-[1|Computers & Technology|__CGIPATH__/search/sortweek desc/%09collection:computersandtechvideos|__SERVER__|__PORT__]
-[1|Cultural & Academic Films|__CGIPATH__/search/sortweek desc/%09collection:culturalandacademicfilms|__SERVER__|__PORT__]
-[1|Ephemeral Films|__CGIPATH__/search/sortweek desc/%09collection:ephemera|__SERVER__|__PORT__]
-[1|Movies|__CGIPATH__/search/sortweek desc/%09collection:moviesandfilms|__SERVER__|__PORT__]
-[1|News & Public Affairs|__CGIPATH__/search/sortweek desc/%09collection:newsandpublicaffairs|__SERVER__|__PORT__]
-[1|Spirituality & Religion|__CGIPATH__/search/sortweek desc/%09collection:spiritualityandreligion|__SERVER__|__PORT__]
-[1|Sports|__CGIPATH__/search/sortweek desc/%09collection:sports|__SERVER__|__PORT__]
-[1|Television|__CGIPATH__/search/sortweek desc/%09collection:television AND mediatype:collection|__SERVER__|__PORT__]
-[1|VHS Vault|__CGIPATH__/search/sortweek desc/%09collection:vhsvault|__SERVER__|__PORT__]
-[1|Video Games|__CGIPATH__/search/sortweek desc/%09collection:gamevideos AND mediatype:collection|__SERVER__|__PORT__]
-[1|Vlogs|__CGIPATH__/search/sortweek desc/%09collection:vlogs|__SERVER__|__PORT__]
-[1|Youth Media|__CGIPATH__/search/sortweek desc/%09collection:youth_media|__SERVER__|__PORT__]
Index: src/web.awk
==================================================================
--- src/web.awk
+++ src/web.awk
@@ -1,8 +1,7 @@
function dump(search, type, base, cmd, curlcfg, is_html, is_image,
- label, limit, link, marker, parts, prefix, proto, relative, root,
- url)
+ label, limit, link, marker, parts, proto, relative, root, url)
{
url = search
gsub(/%3F/, "?", url)
if (url !~ /^(http|https):\/\/[[:alnum:].-]+(:[0-9]+)*(\/[[:alnum:].,?@~=%%:\/+&_() -]*)*$/) {
@@ -75,32 +74,32 @@
marker = 999999
if (type == TYPE_LINKS) {
is_html = detect_html(url)
is_image = detect_image(url)
- printf "[9|Binary download|%s/raw/%%09%s|%s|%s]\n", cgipath,
+ printf "9Binary download\t%s/raw?%s\t%s\t%s\n", cgipath,
search, server, port
if (is_image) {
- printf "[I|Image view|%s/raw/%%09%s|%s|%s]\n", cgipath,
+ printf "IImage view\t%s/raw?%s\t%s\t%s\n", cgipath,
search, server, port
}
if (is_html) {
label = "Source"
} else {
label = "Text view"
}
- printf "[0|%s|%s/raw/%%09%s|%s|%s]\n", label, cgipath, search,
+ printf "0%s\t%s/raw?%s\t%s\t%s\n", label, cgipath, search,
server, port
if (is_html) {
label = "HTML view"
} else {
label = "Strings"
}
- printf "[0|%s|%s/text/%%09%s|%s|%s]\n", label, cgipath, search,
- server, port
- printf "[0|Headers|%s/debug/%%09%s|%s|%s]\n", cgipath, search,
- server, port
+ printf "0%s\t%s/text?%s\t%s\t%s\n", label, cgipath,
+ search, server, port
+ printf "0Headers\t%s/debug?%s\t%s\t%s\n", cgipath,
+ search, server, port
print ""
}
while ((cmd | getline) > 0) {
if (NR < marker) {
@@ -191,11 +190,11 @@
id = substr(id, 1, RSTART - 1)
}
}
if (length(id) > 0) {
label = prefix id
- printf "[1|%s|%s/details/%s|%s|%s]\n", label, cgipath,
+ printf "1%s\t%s/details?%s\t%s\t%s\n", label, cgipath,
id, server, port >>output
} else {
print str >>output
}
} else {
ADDED src/wizard/gophermap
Index: src/wizard/gophermap
==================================================================
--- /dev/null
+++ src/wizard/gophermap
@@ -0,0 +1,10 @@
+i , _
+i /| | |
+i _/_\_ >_<
+i .-\-/. |
+i / | | \_ |
+i \ \| |\__(/
+i /(`---') |
+i / / \ |
+i _.' \'-' / |
+i `----'`=-=' ' hjw
ADDED src/wizard/step1.m4
Index: src/wizard/step1.m4
==================================================================
--- /dev/null
+++ src/wizard/step1.m4
@@ -0,0 +1,65 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/wizard/step1
+#
+# Select field to filter/search by
+
+include(src/config.awk)
+incl(src/cgi.awk)
+
+function main( i, lbl, opt) {
+ lbl[1] = "Any field contains"
+ opt[1] = "anyfield"
+ lbl[2] = "Any field does not contain"
+ opt[2] = "-anyfield"
+ lbl[3] = "Title contains"
+ opt[3] = "title"
+ lbl[4] = "Title does not contain"
+ opt[4] = "-title"
+ lbl[5] = "Creator contains"
+ opt[5] = "creator"
+ lbl[6] = "Creator does not contain"
+ opt[6] = "-creator"
+ lbl[7] = "Description contains"
+ opt[7] = "description"
+ lbl[8] = "Description does not contain"
+ opt[8] = "-description"
+ lbl[9] = "Collection is"
+ opt[9] = "collection"
+ lbl[10] = "Collection does not contain"
+ opt[10] = "-collection"
+ lbl[11] = "Mediatype is"
+ opt[11] = "mediatype"
+ lbl[12] = "Mediatype does not contain"
+ opt[12] = "-mediatype"
+ lbl[13] = "Date or date range is"
+ opt[13] = "date"
+ lbl[14] = "Language contains"
+ opt[14] = "language"
+ lbl[15] = "Always available"
+ opt[15] = "-access-restricted-item"
+
+ print "# Search wizard: Select field"
+ print ""
+ for (i = 1; i < 16; i++) {
+ if (opt[i] ~ /mediatype$/) {
+			printf "1%s\t%s/wizard/step2?%s/%s\t%s\t%s\n",
+ lbl[i], cgipath, search, opt[i], server, port
+ } else if (lbl[i] == "Always available") {
+ printf "1%s\t%s/wizard/step3?%s/%s/true\t%s\t%s\n",
+ lbl[i], cgipath, search, opt[i], server, port
+ } else {
+ printf "7%s\t%s/wizard/step3?%s/%s\t%s\t%s\n",
+ lbl[i], cgipath, search, opt[i], server, port
+ }
+ }
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ main()
+}
DELETED src/wizard/step1/index.dcgi.m4
Index: src/wizard/step1/index.dcgi.m4
==================================================================
--- src/wizard/step1/index.dcgi.m4
+++ /dev/null
@@ -1,67 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# wizard/step1/index.dcgi
-#
-# Select field to filter/search by
-
-include(src/config.awk)
-incl(src/cgi.awk)
-
-function main( i, lbl, opt, searchstr) {
- searchstr = parts[4]
-
- lbl[1] = "Any field contains"
- opt[1] = "anyfield"
- lbl[2] = "Any field does not contain"
- opt[2] = "-anyfield"
- lbl[3] = "Title contains"
- opt[3] = "title"
- lbl[4] = "Title does not contain"
- opt[4] = "-title"
- lbl[5] = "Creator contains"
- opt[5] = "creator"
- lbl[6] = "Creator does not contain"
- opt[6] = "-creator"
- lbl[7] = "Description contains"
- opt[7] = "description"
- lbl[8] = "Description does not contain"
- opt[8] = "-description"
- lbl[9] = "Collection is"
- opt[9] = "collection"
- lbl[10] = "Collection does not contain"
- opt[10] = "-collection"
- lbl[11] = "Mediatype is"
- opt[11] = "mediatype"
- lbl[12] = "Mediatype does not contain"
- opt[12] = "-mediatype"
- lbl[13] = "Date or date range is"
- opt[13] = "date"
- lbl[14] = "Language contains"
- opt[14] = "language"
- lbl[15] = "Always available"
- opt[15] = "-access-restricted-item"
-
- print "# Search wizard: Select field"
- print ""
- for (i = 1; i < 16; i++) {
- if (opt[i] ~ /mediatype$/) {
- printf "[1|%s|%s/wizard/step2/%s/%s|%s|%s]\n", lbl[i],
- cgipath, opt[i], searchstr, server, port
- } else if (lbl[i] == "Always available") {
- printf "[1|%s|%s/wizard/step3/%s/%s%%09true|%s|%s]\n",
- lbl[i], cgipath, opt[i], searchstr, server, port
- } else {
- printf "[7|%s|%s/wizard/step3/%s/%s|%s|%s]\n", lbl[i],
- cgipath, opt[i], searchstr, server, port
- }
- }
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- main()
-}
ADDED src/wizard/step2.m4
Index: src/wizard/step2.m4
==================================================================
--- /dev/null
+++ src/wizard/step2.m4
@@ -0,0 +1,51 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/wizard/step2
+#
+# Select mediatype to filter/search by
+
+include(src/config.awk)
+incl(src/cgi.awk)
+
+function main( field, i, lbl, newsearch, searchstr) {
+	split(search, parts, "/"); searchstr = parts[1]
+	field = parts[2]
+
+ if (field == "mediatype") {
+ print "# Mediatype is:"
+ } else {
+ print "# Mediatype does not contain:"
+ }
+ print ""
+
+ lbl[1] = "audio"
+ lbl[2] = "collection"
+ lbl[3] = "data"
+ lbl[4] = "etree"
+ lbl[5] = "image"
+ lbl[6] = "movies"
+ lbl[7] = "software"
+ lbl[8] = "texts"
+ lbl[9] = "web"
+ for (i = 1; i < 10; i++) {
+ if (length(searchstr) == 0) {
+ newsearch = sprintf("%s:(%s)", field, lbl[i])
+ } else {
+ newsearch = sprintf("%s AND %s:(%s)", searchstr, field, lbl[i])
+ }
+ printf "1%s\t%s/search?%s\t%s\t%s\n", lbl[i], cgipath,
+ newsearch, server, port
+ }
+ print "# Progress:"
+ print ""
+ print "* Field: Mediatype"
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ main()
+}
DELETED src/wizard/step2/index.dcgi.m4
Index: src/wizard/step2/index.dcgi.m4
==================================================================
--- src/wizard/step2/index.dcgi.m4
+++ /dev/null
@@ -1,51 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# wizard/step2/index.dcgi
-#
-# Select mediatype to filter/search by
-
-include(src/config.awk)
-incl(src/cgi.awk)
-
-function main( field, newsearch, searchstr) {
- field = parts[4]
- searchstr = parts[5]
-
- if (field == "mediatype") {
- print "# Mediatype is:"
- } else {
- print "# Mediatype does not contain:"
- }
- print ""
-
- lbl[1] = "audio"
- lbl[2] = "collection"
- lbl[3] = "data"
- lbl[4] = "etree"
- lbl[5] = "image"
- lbl[6] = "movies"
- lbl[7] = "software"
- lbl[8] = "texts"
- lbl[9] = "web"
- for (i = 1; i < 10; i++) {
- if (length(searchstr) == 0) {
- newsearch = sprintf("%s:(%s)", field, lbl[i])
- } else {
- newsearch = sprintf("%s AND %s:(%s)", searchstr, field, lbl[i])
- }
- printf "[1|%s|%s/search/%%09%s|%s|%s]\n", lbl[i], cgipath,
- newsearch, server, port
- }
- print "# Progress:"
- print ""
- print "* Field: Mediatype"
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- main()
-}
ADDED src/wizard/step3.m4
Index: src/wizard/step3.m4
==================================================================
--- /dev/null
+++ src/wizard/step3.m4
@@ -0,0 +1,45 @@
+include(config.m4)dnl
+#!__CMD_AWK__ -f
+
+# __CGIPATH__/wizard/step3
+#
+# Apply new search terms
+
+include(src/config.awk)
+incl(src/cgi.awk)
+
+function main( field, label, newsearch, op, searchstr, value) {
+ split(search, parts, "/")
+ searchstr = parts[1]
+ field = parts[2]
+ value = searchreq
+
+ if (field ~ /^-/) {
+ label = toupper(substr(field, 2, 1)) substr(field, 3)
+ op = "does not contain"
+ } else {
+ label = toupper(substr(field, 1, 1)) substr(field, 2)
+ op = "contains"
+ }
+ if (length(searchstr) == 0) {
+ newsearch = sprintf("%s:(%s)", field, value)
+ } else {
+ newsearch = sprintf("%s AND %s:(%s)", searchstr, field, value)
+ }
+ print ""
+ printf "1Apply search criteria\t%s/search?%s\t%s\t%s\n",
+ cgipath, newsearch, server, port
+ print ""
+ print "# Progress:"
+ print ""
+ printf "* Field %s %s %s\n", label, op, value
+ printf "* New search: %s\n", newsearch
+ exit 0
+}
+
+BEGIN {
+ config_init()
+
+ cgi_init()
+ main()
+}
DELETED src/wizard/step3/index.dcgi.m4
Index: src/wizard/step3/index.dcgi.m4
==================================================================
--- src/wizard/step3/index.dcgi.m4
+++ /dev/null
@@ -1,44 +0,0 @@
-include(config.m4)dnl
-#!__CMD_AWK__ -f
-
-# wizard/step3/index.dcgi
-#
-# Apply new search terms
-
-include(src/config.awk)
-incl(src/cgi.awk)
-
-function main( field, label, newsearch, op, searchstr, value) {
- field = parts[4]
- searchstr = parts[5]
- value = search
-
- if (field ~ /^-/) {
- label = toupper(substr(field, 2, 1)) substr(field, 3)
- op = "does not contain"
- } else {
- label = toupper(substr(field, 1, 1)) substr(field, 2)
- op = "contains"
- }
- if (length(searchstr) == 0) {
- newsearch = sprintf("%s:(%s)", field, value)
- } else {
- newsearch = sprintf("%s AND %s:(%s)", searchstr, field, value)
- }
- print ""
- printf "[1|Apply search criteria|%s/search/%%09%s|%s|%s]\n",
- cgipath, newsearch, server, port
- print ""
- print "# Progress:"
- print ""
- printf "* Field %s %s %s\n", label, op, value
- printf "* New search: %s\n", newsearch
- exit 0
-}
-
-BEGIN {
- config_init()
-
- cgi_init()
- main()
-}