Sphinx generated HTML.

This commit is contained in:
Ole Tange 2021-07-11 22:56:53 +02:00
parent a00733e2af
commit 56693a99ac
6 changed files with 256 additions and 946 deletions

NEWS

@@ -268,6 +268,12 @@ Quote of the month:
New in this release:
* --results no longer prints the result to standard output (stdout).
* parset can now save output into associative arrays in bash/ksh/zsh (see the sketch after this list).
* Online HTML is now generated by Sphinx.
* Bug fixes and man page updates.
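For the parset item above, a minimal sketch of saving into an associative array in bash (assuming GNU Parallel and env_parallel.bash are installed; the array keys are illustrative, see parset(1) for the exact semantics):

    . "$(which env_parallel.bash)"    # makes parset available in the current shell
    typeset -A result                 # declare an associative array first
    # assign each job's output to a named element of the array
    parset 'result[html] result[pdf]' echo built ::: html pdf
    echo "${result[html]}"            # prints: built html
    echo "${result[pdf]}"             # prints: built pdf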
News about GNU Parallel:

src/Makefile.am

@@ -2,6 +2,8 @@
#
# SPDX-License-Identifier: GPL-3.0-or-later
SUBDIRS = sphinx
bin_SCRIPTS = parallel sql niceload parcat parset parsort \
env_parallel env_parallel.ash env_parallel.bash \
env_parallel.csh env_parallel.dash env_parallel.fish \

src/Makefile.in (deleted)

@@ -1,946 +0,0 @@
# Makefile.in generated by automake 1.16.1 from Makefile.am.
# @configure_input@
# Copyright (C) 1994-2018 Free Software Foundation, Inc.
# This Makefile.in is free software; the Free Software Foundation
# gives unlimited permission to copy and/or distribute it,
# with or without modifications, as long as this notice is preserved.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY, to the extent permitted by law; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE.
@SET_MAKE@
# SPDX-FileCopyrightText: 2002-2021 Ole Tange, http://ole.tange.dk and Free Software Foundation, Inc.
#
# SPDX-License-Identifier: GPL-3.0-or-later
VPATH = @srcdir@
am__is_gnu_make = { \
if test -z '$(MAKELEVEL)'; then \
false; \
elif test -n '$(MAKE_HOST)'; then \
true; \
elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \
true; \
else \
false; \
fi; \
}
am__make_running_with_option = \
case $${target_option-} in \
?) ;; \
*) echo "am__make_running_with_option: internal error: invalid" \
"target option '$${target_option-}' specified" >&2; \
exit 1;; \
esac; \
has_opt=no; \
sane_makeflags=$$MAKEFLAGS; \
if $(am__is_gnu_make); then \
sane_makeflags=$$MFLAGS; \
else \
case $$MAKEFLAGS in \
*\\[\ \ ]*) \
bs=\\; \
sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \
| sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \
esac; \
fi; \
skip_next=no; \
strip_trailopt () \
{ \
flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \
}; \
for flg in $$sane_makeflags; do \
test $$skip_next = yes && { skip_next=no; continue; }; \
case $$flg in \
*=*|--*) continue;; \
-*I) strip_trailopt 'I'; skip_next=yes;; \
-*I?*) strip_trailopt 'I';; \
-*O) strip_trailopt 'O'; skip_next=yes;; \
-*O?*) strip_trailopt 'O';; \
-*l) strip_trailopt 'l'; skip_next=yes;; \
-*l?*) strip_trailopt 'l';; \
-[dEDm]) skip_next=yes;; \
-[JT]) skip_next=yes;; \
esac; \
case $$flg in \
*$$target_option*) has_opt=yes; break;; \
esac; \
done; \
test $$has_opt = yes
am__make_dryrun = (target_option=n; $(am__make_running_with_option))
am__make_keepgoing = (target_option=k; $(am__make_running_with_option))
pkgdatadir = $(datadir)/@PACKAGE@
pkgincludedir = $(includedir)/@PACKAGE@
pkglibdir = $(libdir)/@PACKAGE@
pkglibexecdir = $(libexecdir)/@PACKAGE@
am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd
install_sh_DATA = $(install_sh) -c -m 644
install_sh_PROGRAM = $(install_sh) -c
install_sh_SCRIPT = $(install_sh) -c
INSTALL_HEADER = $(INSTALL_DATA)
transform = $(program_transform_name)
NORMAL_INSTALL = :
PRE_INSTALL = :
POST_INSTALL = :
NORMAL_UNINSTALL = :
PRE_UNINSTALL = :
POST_UNINSTALL = :
subdir = src
ACLOCAL_M4 = $(top_srcdir)/aclocal.m4
am__aclocal_m4_deps = $(top_srcdir)/configure.ac
am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \
$(ACLOCAL_M4)
DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON)
mkinstalldirs = $(install_sh) -d
CONFIG_HEADER = $(top_builddir)/config.h
CONFIG_CLEAN_FILES =
CONFIG_CLEAN_VPATH_FILES =
am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`;
am__vpath_adj = case $$p in \
$(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \
*) f=$$p;; \
esac;
am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`;
am__install_max = 40
am__nobase_strip_setup = \
srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'`
am__nobase_strip = \
for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||"
am__nobase_list = $(am__nobase_strip_setup); \
for p in $$list; do echo "$$p $$p"; done | \
sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \
$(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \
if (++n[$$2] == $(am__install_max)) \
{ print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \
END { for (dir in files) print dir, files[dir] }'
am__base_list = \
sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \
sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g'
am__uninstall_files_from_dir = { \
test -z "$$files" \
|| { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \
|| { echo " ( cd '$$dir' && rm -f" $$files ")"; \
$(am__cd) "$$dir" && rm -f $$files; }; \
}
am__installdirs = "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" \
"$(DESTDIR)$(man7dir)" "$(DESTDIR)$(docdir)"
SCRIPTS = $(bin_SCRIPTS)
AM_V_P = $(am__v_P_@AM_V@)
am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
am__v_P_0 = false
am__v_P_1 = :
AM_V_GEN = $(am__v_GEN_@AM_V@)
am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@)
am__v_GEN_0 = @echo " GEN " $@;
am__v_GEN_1 =
AM_V_at = $(am__v_at_@AM_V@)
am__v_at_ = $(am__v_at_@AM_DEFAULT_V@)
am__v_at_0 = @
am__v_at_1 =
SOURCES =
DIST_SOURCES =
am__can_run_installinfo = \
case $$AM_UPDATE_INFO_DIR in \
n|no|NO) false;; \
*) (install-info --version) >/dev/null 2>&1;; \
esac
man1dir = $(mandir)/man1
man7dir = $(mandir)/man7
NROFF = nroff
MANS = $(man_MANS)
DATA = $(doc_DATA)
am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP)
am__DIST_COMMON = $(srcdir)/Makefile.in
DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST)
ACLOCAL = @ACLOCAL@
AMTAR = @AMTAR@
AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@
AUTOCONF = @AUTOCONF@
AUTOHEADER = @AUTOHEADER@
AUTOMAKE = @AUTOMAKE@
AWK = @AWK@
CYGPATH_W = @CYGPATH_W@
DEFS = @DEFS@
ECHO_C = @ECHO_C@
ECHO_N = @ECHO_N@
ECHO_T = @ECHO_T@
INSTALL = @INSTALL@
INSTALL_DATA = @INSTALL_DATA@
INSTALL_PROGRAM = @INSTALL_PROGRAM@
INSTALL_SCRIPT = @INSTALL_SCRIPT@
INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@
LIBOBJS = @LIBOBJS@
LIBS = @LIBS@
LN_S = @LN_S@
LTLIBOBJS = @LTLIBOBJS@
MAKEINFO = @MAKEINFO@
MKDIR_P = @MKDIR_P@
PACKAGE = @PACKAGE@
PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@
PACKAGE_NAME = @PACKAGE_NAME@
PACKAGE_STRING = @PACKAGE_STRING@
PACKAGE_TARNAME = @PACKAGE_TARNAME@
PACKAGE_URL = @PACKAGE_URL@
PACKAGE_VERSION = @PACKAGE_VERSION@
PATH_SEPARATOR = @PATH_SEPARATOR@
SET_MAKE = @SET_MAKE@
SHELL = @SHELL@
STRIP = @STRIP@
VERSION = @VERSION@
abs_builddir = @abs_builddir@
abs_srcdir = @abs_srcdir@
abs_top_builddir = @abs_top_builddir@
abs_top_srcdir = @abs_top_srcdir@
am__leading_dot = @am__leading_dot@
am__tar = @am__tar@
am__untar = @am__untar@
bindir = @bindir@
build_alias = @build_alias@
builddir = @builddir@
datadir = @datadir@
datarootdir = @datarootdir@
docdir = @docdir@
dvidir = @dvidir@
exec_prefix = @exec_prefix@
host_alias = @host_alias@
htmldir = @htmldir@
includedir = @includedir@
infodir = @infodir@
install_sh = @install_sh@
libdir = @libdir@
libexecdir = @libexecdir@
localedir = @localedir@
localstatedir = @localstatedir@
mandir = @mandir@
mkdir_p = @mkdir_p@
oldincludedir = @oldincludedir@
pdfdir = @pdfdir@
prefix = @prefix@
program_transform_name = @program_transform_name@
psdir = @psdir@
runstatedir = @runstatedir@
sbindir = @sbindir@
sharedstatedir = @sharedstatedir@
srcdir = @srcdir@
sysconfdir = @sysconfdir@
target_alias = @target_alias@
top_build_prefix = @top_build_prefix@
top_builddir = @top_builddir@
top_srcdir = @top_srcdir@
bin_SCRIPTS = parallel sql niceload parcat parset parsort \
env_parallel env_parallel.ash env_parallel.bash \
env_parallel.csh env_parallel.dash env_parallel.fish \
env_parallel.ksh env_parallel.mksh env_parallel.pdksh \
env_parallel.sh env_parallel.tcsh env_parallel.zsh
@DOCUMENTATION_TRUE@man_MANS = parallel.1 env_parallel.1 sem.1 sql.1 niceload.1 \
@DOCUMENTATION_TRUE@ parallel_tutorial.7 parallel_book.7 parallel_design.7 \
@DOCUMENTATION_TRUE@ parallel_alternatives.7 parcat.1 parset.1 parsort.1
@DOCUMENTATION_TRUE@doc_DATA = parallel.html env_parallel.html sem.html sql.html \
@DOCUMENTATION_TRUE@ niceload.html parallel_tutorial.html parallel_book.html \
@DOCUMENTATION_TRUE@ parallel_design.html parallel_alternatives.html parcat.html \
@DOCUMENTATION_TRUE@ parset.html parsort.html \
@DOCUMENTATION_TRUE@ parallel.texi env_parallel.texi sem.texi sql.texi \
@DOCUMENTATION_TRUE@ niceload.texi parallel_tutorial.texi parallel_book.texi \
@DOCUMENTATION_TRUE@ parallel_design.texi parallel_alternatives.texi parcat.texi \
@DOCUMENTATION_TRUE@ parset.texi parsort.texi \
@DOCUMENTATION_TRUE@ parallel.rst env_parallel.rst sem.rst sql.rst \
@DOCUMENTATION_TRUE@ niceload.rst parallel_tutorial.rst parallel_book.rst \
@DOCUMENTATION_TRUE@ parallel_design.rst parallel_alternatives.rst parcat.rst \
@DOCUMENTATION_TRUE@ parset.rst parsort.rst \
@DOCUMENTATION_TRUE@ parallel.pdf env_parallel.pdf sem.pdf sql.pdf niceload.pdf \
@DOCUMENTATION_TRUE@ parallel_tutorial.pdf parallel_book.pdf parallel_design.pdf \
@DOCUMENTATION_TRUE@ parallel_alternatives.pdf parcat.pdf parset.pdf parsort.pdf \
@DOCUMENTATION_TRUE@ parallel_cheat_bw.pdf
DISTCLEANFILES = parallel.1 env_parallel.1 sem.1 sql.1 niceload.1 \
parallel_tutorial.7 parallel_book.7 parallel_design.7 \
parallel_alternatives.7 parcat.1 parset.1 parsort.1 \
parallel.html env_parallel.html sem.html sql.html \
niceload.html parallel_tutorial.html parallel_book.html \
parallel_design.html parallel_alternatives.html parcat.html \
parset.html parsort.html parallel.texi env_parallel.texi \
sem.texi sql.texi niceload.texi parallel_tutorial.texi \
parallel_book.texi parallel_design.texi \
parallel_alternatives.texi parcat.texi parset.texi \
parsort.texi \
parallel.rst env_parallel.rst sem.rst sql.rst niceload.rst \
parallel_tutorial.rst parallel_book.rst parallel_design.rst \
parallel_alternatives.rst parcat.rst parset.rst parsort.rst \
parallel.pdf env_parallel.pdf sem.pdf sql.pdf niceload.pdf \
parallel_tutorial.pdf parallel_book.pdf parallel_design.pdf \
parallel_alternatives.pdf parcat.pdf parset.pdf parsort.pdf \
parallel_cheat_bw.pdf
EXTRA_DIST = parallel sem sql niceload parcat parset parsort \
env_parallel env_parallel.ash env_parallel.bash \
env_parallel.csh env_parallel.dash env_parallel.fish \
env_parallel.ksh env_parallel.mksh env_parallel.pdksh \
env_parallel.sh env_parallel.tcsh env_parallel.zsh parcat.pod \
parset.pod sem.pod parallel.pod env_parallel.pod niceload.pod \
parallel_tutorial.pod parallel_book.pod parallel_design.pod \
parallel_alternatives.pod parallel_cheat_bw.fodt \
$(DISTCLEANFILES)
all: all-am
.SUFFIXES:
$(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps)
@for dep in $?; do \
case '$(am__configure_deps)' in \
*$$dep*) \
( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \
&& { if test -f $@; then exit 0; else break; fi; }; \
exit 1;; \
esac; \
done; \
echo ' cd $(top_srcdir) && $(AUTOMAKE) --foreign src/Makefile'; \
$(am__cd) $(top_srcdir) && \
$(AUTOMAKE) --foreign src/Makefile
Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status
@case '$?' in \
*config.status*) \
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \
*) \
echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles)'; \
cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__maybe_remake_depfiles);; \
esac;
$(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(top_srcdir)/configure: $(am__configure_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(ACLOCAL_M4): $(am__aclocal_m4_deps)
cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh
$(am__aclocal_m4_deps):
install-binSCRIPTS: $(bin_SCRIPTS)
@$(NORMAL_INSTALL)
@list='$(bin_SCRIPTS)'; test -n "$(bindir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(bindir)'"; \
$(MKDIR_P) "$(DESTDIR)$(bindir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
if test -f "$$d$$p"; then echo "$$d$$p"; echo "$$p"; else :; fi; \
done | \
sed -e 'p;s,.*/,,;n' \
-e 'h;s|.*|.|' \
-e 'p;x;s,.*/,,;$(transform)' | sed 'N;N;N;s,\n, ,g' | \
$(AWK) 'BEGIN { files["."] = ""; dirs["."] = 1; } \
{ d=$$3; if (dirs[d] != 1) { print "d", d; dirs[d] = 1 } \
if ($$2 == $$4) { files[d] = files[d] " " $$1; \
if (++n[d] == $(am__install_max)) { \
print "f", d, files[d]; n[d] = 0; files[d] = "" } } \
else { print "f", d "/" $$4, $$1 } } \
END { for (d in files) print "f", d, files[d] }' | \
while read type dir files; do \
if test "$$dir" = .; then dir=; else dir=/$$dir; fi; \
test -z "$$files" || { \
echo " $(INSTALL_SCRIPT) $$files '$(DESTDIR)$(bindir)$$dir'"; \
$(INSTALL_SCRIPT) $$files "$(DESTDIR)$(bindir)$$dir" || exit $$?; \
} \
; done
uninstall-binSCRIPTS:
@$(NORMAL_UNINSTALL)
@list='$(bin_SCRIPTS)'; test -n "$(bindir)" || exit 0; \
files=`for p in $$list; do echo "$$p"; done | \
sed -e 's,.*/,,;$(transform)'`; \
dir='$(DESTDIR)$(bindir)'; $(am__uninstall_files_from_dir)
install-man1: $(man_MANS)
@$(NORMAL_INSTALL)
@list1=''; \
list2='$(man_MANS)'; \
test -n "$(man1dir)" \
&& test -n "`echo $$list1$$list2`" \
|| exit 0; \
echo " $(MKDIR_P) '$(DESTDIR)$(man1dir)'"; \
$(MKDIR_P) "$(DESTDIR)$(man1dir)" || exit 1; \
{ for i in $$list1; do echo "$$i"; done; \
if test -n "$$list2"; then \
for i in $$list2; do echo "$$i"; done \
| sed -n '/\.1[a-z]*$$/p'; \
fi; \
} | while read p; do \
if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; echo "$$p"; \
done | \
sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \
-e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \
sed 'N;N;s,\n, ,g' | { \
list=; while read file base inst; do \
if test "$$base" = "$$inst"; then list="$$list $$file"; else \
echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \
$(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \
fi; \
done; \
for i in $$list; do echo "$$i"; done | $(am__base_list) | \
while read files; do \
test -z "$$files" || { \
echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \
$(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \
done; }
uninstall-man1:
@$(NORMAL_UNINSTALL)
@list=''; test -n "$(man1dir)" || exit 0; \
files=`{ for i in $$list; do echo "$$i"; done; \
l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \
sed -n '/\.1[a-z]*$$/p'; \
} | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \
-e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \
dir='$(DESTDIR)$(man1dir)'; $(am__uninstall_files_from_dir)
install-man7: $(man_MANS)
@$(NORMAL_INSTALL)
@list1=''; \
list2='$(man_MANS)'; \
test -n "$(man7dir)" \
&& test -n "`echo $$list1$$list2`" \
|| exit 0; \
echo " $(MKDIR_P) '$(DESTDIR)$(man7dir)'"; \
$(MKDIR_P) "$(DESTDIR)$(man7dir)" || exit 1; \
{ for i in $$list1; do echo "$$i"; done; \
if test -n "$$list2"; then \
for i in $$list2; do echo "$$i"; done \
| sed -n '/\.7[a-z]*$$/p'; \
fi; \
} | while read p; do \
if test -f $$p; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; echo "$$p"; \
done | \
sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^7][0-9a-z]*$$,7,;x' \
-e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \
sed 'N;N;s,\n, ,g' | { \
list=; while read file base inst; do \
if test "$$base" = "$$inst"; then list="$$list $$file"; else \
echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man7dir)/$$inst'"; \
$(INSTALL_DATA) "$$file" "$(DESTDIR)$(man7dir)/$$inst" || exit $$?; \
fi; \
done; \
for i in $$list; do echo "$$i"; done | $(am__base_list) | \
while read files; do \
test -z "$$files" || { \
echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man7dir)'"; \
$(INSTALL_DATA) $$files "$(DESTDIR)$(man7dir)" || exit $$?; }; \
done; }
uninstall-man7:
@$(NORMAL_UNINSTALL)
@list=''; test -n "$(man7dir)" || exit 0; \
files=`{ for i in $$list; do echo "$$i"; done; \
l2='$(man_MANS)'; for i in $$l2; do echo "$$i"; done | \
sed -n '/\.7[a-z]*$$/p'; \
} | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^7][0-9a-z]*$$,7,;x' \
-e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \
dir='$(DESTDIR)$(man7dir)'; $(am__uninstall_files_from_dir)
install-docDATA: $(doc_DATA)
@$(NORMAL_INSTALL)
@list='$(doc_DATA)'; test -n "$(docdir)" || list=; \
if test -n "$$list"; then \
echo " $(MKDIR_P) '$(DESTDIR)$(docdir)'"; \
$(MKDIR_P) "$(DESTDIR)$(docdir)" || exit 1; \
fi; \
for p in $$list; do \
if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \
echo "$$d$$p"; \
done | $(am__base_list) | \
while read files; do \
echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(docdir)'"; \
$(INSTALL_DATA) $$files "$(DESTDIR)$(docdir)" || exit $$?; \
done
uninstall-docDATA:
@$(NORMAL_UNINSTALL)
@list='$(doc_DATA)'; test -n "$(docdir)" || list=; \
files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \
dir='$(DESTDIR)$(docdir)'; $(am__uninstall_files_from_dir)
tags TAGS:
ctags CTAGS:
cscope cscopelist:
distdir: $(BUILT_SOURCES)
$(MAKE) $(AM_MAKEFLAGS) distdir-am
distdir-am: $(DISTFILES)
@srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \
list='$(DISTFILES)'; \
dist_files=`for file in $$list; do echo $$file; done | \
sed -e "s|^$$srcdirstrip/||;t" \
-e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \
case $$dist_files in \
*/*) $(MKDIR_P) `echo "$$dist_files" | \
sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \
sort -u` ;; \
esac; \
for file in $$dist_files; do \
if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \
if test -d $$d/$$file; then \
dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \
if test -d "$(distdir)/$$file"; then \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \
cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \
fi; \
cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \
else \
test -f "$(distdir)/$$file" \
|| cp -p $$d/$$file "$(distdir)/$$file" \
|| exit 1; \
fi; \
done
check-am: all-am
check: check-am
all-am: Makefile $(SCRIPTS) $(MANS) $(DATA)
installdirs:
for dir in "$(DESTDIR)$(bindir)" "$(DESTDIR)$(man1dir)" "$(DESTDIR)$(man7dir)" "$(DESTDIR)$(docdir)"; do \
test -z "$$dir" || $(MKDIR_P) "$$dir"; \
done
install: install-am
install-exec: install-exec-am
install-data: install-data-am
uninstall: uninstall-am
install-am: all-am
@$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am
installcheck: installcheck-am
install-strip:
if test -z '$(STRIP)'; then \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
install; \
else \
$(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \
install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \
"INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \
fi
mostlyclean-generic:
clean-generic:
distclean-generic:
-test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES)
-test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES)
-test -z "$(DISTCLEANFILES)" || rm -f $(DISTCLEANFILES)
maintainer-clean-generic:
@echo "This command is intended for maintainers to use"
@echo "it deletes files that may require special tools to rebuild."
clean: clean-am
clean-am: clean-generic mostlyclean-am
distclean: distclean-am
-rm -f Makefile
distclean-am: clean-am distclean-generic
dvi: dvi-am
dvi-am:
html: html-am
html-am:
info: info-am
info-am:
install-data-am: install-docDATA install-man
install-dvi: install-dvi-am
install-dvi-am:
install-exec-am: install-binSCRIPTS
@$(NORMAL_INSTALL)
$(MAKE) $(AM_MAKEFLAGS) install-exec-hook
install-html: install-html-am
install-html-am:
install-info: install-info-am
install-info-am:
install-man: install-man1 install-man7
install-pdf: install-pdf-am
install-pdf-am:
install-ps: install-ps-am
install-ps-am:
installcheck-am:
maintainer-clean: maintainer-clean-am
-rm -f Makefile
maintainer-clean-am: distclean-am maintainer-clean-generic
mostlyclean: mostlyclean-am
mostlyclean-am: mostlyclean-generic
pdf: pdf-am
pdf-am:
ps: ps-am
ps-am:
uninstall-am: uninstall-binSCRIPTS uninstall-docDATA uninstall-man
uninstall-man: uninstall-man1 uninstall-man7
.MAKE: install-am install-exec-am install-strip
.PHONY: all all-am check check-am clean clean-generic cscopelist-am \
ctags-am distclean distclean-generic distdir dvi dvi-am html \
html-am info info-am install install-am install-binSCRIPTS \
install-data install-data-am install-docDATA install-dvi \
install-dvi-am install-exec install-exec-am install-exec-hook \
install-html install-html-am install-info install-info-am \
install-man install-man1 install-man7 install-pdf \
install-pdf-am install-ps install-ps-am install-strip \
installcheck installcheck-am installdirs maintainer-clean \
maintainer-clean-generic mostlyclean mostlyclean-generic pdf \
pdf-am ps ps-am tags-am uninstall uninstall-am \
uninstall-binSCRIPTS uninstall-docDATA uninstall-man \
uninstall-man1 uninstall-man7
.PRECIOUS: Makefile
install-exec-hook:
rm "$(DESTDIR)$(bindir)"/sem || true
$(LN_S) parallel "$(DESTDIR)$(bindir)"/sem
# Build documentation file if the tool to build exists.
# Otherwise: Use the distributed version
parallel.1: parallel.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=1 "$(srcdir)"/parallel.pod > "$(srcdir)"/parallel.1n \
&& mv "$(srcdir)"/parallel.1n "$(srcdir)"/parallel.1 \
|| echo "Warning: pod2man not found. Using old parallel.1"
env_parallel.1: env_parallel.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=1 "$(srcdir)"/env_parallel.pod > "$(srcdir)"/env_parallel.1n \
&& mv "$(srcdir)"/env_parallel.1n "$(srcdir)"/env_parallel.1 \
|| echo "Warning: pod2man not found. Using old env_parallel.1"
parallel_tutorial.7: parallel_tutorial.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=7 "$(srcdir)"/parallel_tutorial.pod > "$(srcdir)"/parallel_tutorial.7n \
&& mv "$(srcdir)"/parallel_tutorial.7n "$(srcdir)"/parallel_tutorial.7 \
|| echo "Warning: pod2man not found. Using old parallel_tutorial.7"
parallel_book.7: parallel_book.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=7 "$(srcdir)"/parallel_book.pod > "$(srcdir)"/parallel_book.7n \
&& mv "$(srcdir)"/parallel_book.7n "$(srcdir)"/parallel_book.7 \
|| echo "Warning: pod2man not found. Using old parallel_book.7"
parallel_design.7: parallel_design.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=7 "$(srcdir)"/parallel_design.pod > "$(srcdir)"/parallel_design.7n \
&& mv "$(srcdir)"/parallel_design.7n "$(srcdir)"/parallel_design.7 \
|| echo "Warning: pod2man not found. Using old parallel_design.7"
parallel_alternatives.7: parallel_alternatives.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=7 "$(srcdir)"/parallel_alternatives.pod > "$(srcdir)"/parallel_alternatives.7n \
&& mv "$(srcdir)"/parallel_alternatives.7n "$(srcdir)"/parallel_alternatives.7 \
|| echo "Warning: pod2man not found. Using old parallel_alternatives.7"
sem.1: sem.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=1 "$(srcdir)"/sem.pod > "$(srcdir)"/sem.1n \
&& mv "$(srcdir)"/sem.1n "$(srcdir)"/sem.1 \
|| echo "Warning: pod2man not found. Using old sem.1"
sql.1: sql
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=1 "$(srcdir)"/sql > "$(srcdir)"/sql.1n \
&& mv "$(srcdir)"/sql.1n "$(srcdir)"/sql.1 \
|| echo "Warning: pod2man not found. Using old sql.1"
niceload.1: niceload.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=1 "$(srcdir)"/niceload.pod > "$(srcdir)"/niceload.1n \
&& mv "$(srcdir)"/niceload.1n "$(srcdir)"/niceload.1 \
|| echo "Warning: pod2man not found. Using old niceload.1"
parcat.1: parcat.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=1 "$(srcdir)"/parcat.pod > "$(srcdir)"/parcat.1n \
&& mv "$(srcdir)"/parcat.1n "$(srcdir)"/parcat.1 \
|| echo "Warning: pod2man not found. Using old parcat.1"
parset.1: parset.pod
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=1 "$(srcdir)"/parset.pod > "$(srcdir)"/parset.1n \
&& mv "$(srcdir)"/parset.1n "$(srcdir)"/parset.1 \
|| echo "Warning: pod2man not found. Using old parset.1"
parsort.1: parsort
pod2man --release='$(PACKAGE_VERSION)' --center='$(PACKAGE_NAME)' \
--section=1 "$(srcdir)"/parsort > "$(srcdir)"/parsort.1n \
&& mv "$(srcdir)"/parsort.1n "$(srcdir)"/parsort.1 \
|| echo "Warning: pod2man not found. Using old parsort.1"
parallel.html: parallel.pod
pod2html --title "GNU Parallel" "$(srcdir)"/parallel.pod > "$(srcdir)"/parallel.htmln \
&& mv "$(srcdir)"/parallel.htmln "$(srcdir)"/parallel.html \
|| echo "Warning: pod2html not found. Using old parallel.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on parallel.html to avoid stupid pod2html race condition
env_parallel.html: env_parallel.pod parallel.html
pod2html --title "GNU Parallel with environment" "$(srcdir)"/env_parallel.pod > "$(srcdir)"/env_parallel.htmln \
&& mv "$(srcdir)"/env_parallel.htmln "$(srcdir)"/env_parallel.html \
|| echo "Warning: pod2html not found. Using old env_parallel.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on env_parallel.html to avoid stupid pod2html race condition
parallel_tutorial.html: parallel_tutorial.pod env_parallel.html
pod2html --title "GNU Parallel tutorial" "$(srcdir)"/parallel_tutorial.pod > "$(srcdir)"/parallel_tutorial.htmln \
&& mv "$(srcdir)"/parallel_tutorial.htmln "$(srcdir)"/parallel_tutorial.html \
|| echo "Warning: pod2html not found. Using old parallel_tutorial.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on parallel_tutorial.html to avoid stupid pod2html race condition
parallel_book.html: parallel_book.pod parallel_tutorial.html
pod2html --title "GNU Parallel book" "$(srcdir)"/parallel_book.pod > "$(srcdir)"/parallel_book.htmln \
&& mv "$(srcdir)"/parallel_book.htmln "$(srcdir)"/parallel_book.html \
|| echo "Warning: pod2html not found. Using old parallel_book.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on parallel_book.html to avoid stupid pod2html race condition
parallel_design.html: parallel_design.pod parallel_book.html
pod2html --title "GNU Parallel design" "$(srcdir)"/parallel_design.pod > "$(srcdir)"/parallel_design.htmln \
&& mv "$(srcdir)"/parallel_design.htmln "$(srcdir)"/parallel_design.html \
|| echo "Warning: pod2html not found. Using old parallel_design.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on parallel_design.html to avoid stupid pod2html race condition
parallel_alternatives.html: parallel_alternatives.pod parallel_design.html
pod2html --title "GNU Parallel alternatives" "$(srcdir)"/parallel_alternatives.pod > "$(srcdir)"/parallel_alternatives.htmln \
&& mv "$(srcdir)"/parallel_alternatives.htmln "$(srcdir)"/parallel_alternatives.html \
|| echo "Warning: pod2html not found. Using old parallel_alternatives.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on parallel_alternatives.html to avoid stupid pod2html race condition
sem.html: sem.pod parallel_alternatives.html
pod2html --title "sem (GNU Parallel)" "$(srcdir)"/sem.pod > "$(srcdir)"/sem.htmln \
&& mv "$(srcdir)"/sem.htmln "$(srcdir)"/sem.html \
|| echo "Warning: pod2html not found. Using old sem.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on sem.html to avoid stupid pod2html race condition
sql.html: sql sem.html
pod2html --title "GNU SQL" "$(srcdir)"/sql > "$(srcdir)"/sql.htmln \
&& mv "$(srcdir)"/sql.htmln "$(srcdir)"/sql.html \
|| echo "Warning: pod2html not found. Using old sql.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on sql.html to avoid stupid pod2html race condition
niceload.html: niceload.pod sql.html
pod2html --title "GNU niceload" "$(srcdir)"/niceload.pod > "$(srcdir)"/niceload.htmln \
&& mv "$(srcdir)"/niceload.htmln "$(srcdir)"/niceload.html \
|| echo "Warning: pod2html not found. Using old niceload.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on niceload.html to avoid stupid pod2html race condition
parcat.html: parcat.pod niceload.html
pod2html --title "GNU parcat" "$(srcdir)"/parcat.pod > "$(srcdir)"/parcat.htmln \
&& mv "$(srcdir)"/parcat.htmln "$(srcdir)"/parcat.html \
|| echo "Warning: pod2html not found. Using old parcat.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on parcat.html to avoid stupid pod2html race condition
parset.html: parset.pod parcat.html
pod2html --title "GNU parset" "$(srcdir)"/parset.pod > "$(srcdir)"/parset.htmln \
&& mv "$(srcdir)"/parset.htmln "$(srcdir)"/parset.html \
|| echo "Warning: pod2html not found. Using old parset.html"
rm -f "$(srcdir)"/pod2htm*
# Depending on parset.html to avoid stupid pod2html race condition
parsort.html: parsort parset.html
pod2html --title "GNU parsort" "$(srcdir)"/parsort > "$(srcdir)"/parsort.htmln \
&& mv "$(srcdir)"/parsort.htmln "$(srcdir)"/parsort.html \
|| echo "Warning: pod2html not found. Using old parsort.html"
rm -f "$(srcdir)"/pod2htm*
parallel.texi: parallel.pod
pod2texi --output="$(srcdir)"/parallel.texi "$(srcdir)"/parallel.pod \
|| echo "Warning: pod2texi not found. Using old parallel.texi"
env_parallel.texi: env_parallel.pod
pod2texi --output="$(srcdir)"/env_parallel.texi "$(srcdir)"/env_parallel.pod \
|| echo "Warning: pod2texi not found. Using old env_parallel.texi"
parallel_tutorial.texi: parallel_tutorial.pod
pod2texi --output="$(srcdir)"/parallel_tutorial.texi "$(srcdir)"/parallel_tutorial.pod \
|| echo "Warning: pod2texi not found. Using old parallel_tutorial.texi"
parallel_book.texi: parallel_book.pod
pod2texi --output="$(srcdir)"/parallel_book.texi "$(srcdir)"/parallel_book.pod \
|| echo "Warning: pod2texi not found. Using old parallel_book.texi"
parallel_design.texi: parallel_design.pod
pod2texi --output="$(srcdir)"/parallel_design.texi "$(srcdir)"/parallel_design.pod \
|| echo "Warning: pod2texi not found. Using old parallel_design.texi"
parallel_alternatives.texi: parallel_alternatives.pod
pod2texi --output="$(srcdir)"/parallel_alternatives.texi "$(srcdir)"/parallel_alternatives.pod \
|| echo "Warning: pod2texi not found. Using old parallel_alternatives.texi"
sem.texi: sem.pod
pod2texi --output="$(srcdir)"/sem.texi "$(srcdir)"/sem.pod \
|| echo "Warning: pod2texi not found. Using old sem.texi"
sql.texi: sql
pod2texi --output="$(srcdir)"/sql.texi "$(srcdir)"/sql \
|| echo "Warning: pod2texi not found. Using old sql.texi"
niceload.texi: niceload.pod
pod2texi --output="$(srcdir)"/niceload.texi "$(srcdir)"/niceload.pod \
|| echo "Warning: pod2texi not found. Using old niceload.texi"
parcat.texi: parcat.pod
pod2texi --output="$(srcdir)"/parcat.texi "$(srcdir)"/parcat.pod \
|| echo "Warning: pod2texi not found. Using old parcat.texi"
parset.texi: parset.pod
pod2texi --output="$(srcdir)"/parset.texi "$(srcdir)"/parset.pod \
|| echo "Warning: pod2texi not found. Using old parset.texi"
parsort.texi: parsort
pod2texi --output="$(srcdir)"/parsort.texi "$(srcdir)"/parsort \
|| echo "Warning: pod2texi not found. Using old parsort.texi"
parallel.rst: parallel.pod
pod2rst --outfile "$(srcdir)"/parallel.rst --infile="$(srcdir)"/parallel.pod \
|| echo "Warning: pod2rst not found. Using old parallel.rst"
env_parallel.rst: env_parallel.pod
pod2rst --outfile "$(srcdir)"/env_parallel.rst --infile="$(srcdir)"/env_parallel.pod \
|| echo "Warning: pod2rst not found. Using old env_parallel.rst"
parallel_tutorial.rst: parallel_tutorial.pod
pod2rst --outfile "$(srcdir)"/parallel_tutorial.rst --infile="$(srcdir)"/parallel_tutorial.pod \
|| echo "Warning: pod2rst not found. Using old parallel_tutorial.rst"
parallel_book.rst: parallel_book.pod
pod2rst --outfile "$(srcdir)"/parallel_book.rst --infile="$(srcdir)"/parallel_book.pod \
|| echo "Warning: pod2rst not found. Using old parallel_book.rst"
parallel_design.rst: parallel_design.pod
pod2rst --outfile "$(srcdir)"/parallel_design.rst --infile="$(srcdir)"/parallel_design.pod \
|| echo "Warning: pod2rst not found. Using old parallel_design.rst"
parallel_alternatives.rst: parallel_alternatives.pod
pod2rst --outfile "$(srcdir)"/parallel_alternatives.rst --infile="$(srcdir)"/parallel_alternatives.pod \
|| echo "Warning: pod2rst not found. Using old parallel_alternatives.rst"
sem.rst: sem.pod
pod2rst --outfile "$(srcdir)"/sem.rst --infile="$(srcdir)"/sem.pod \
|| echo "Warning: pod2rst not found. Using old sem.rst"
sql.rst: sql
pod2rst --outfile "$(srcdir)"/sql.rst --infile="$(srcdir)"/sql \
|| echo "Warning: pod2rst not found. Using old sql.rst"
niceload.rst: niceload.pod
pod2rst --outfile "$(srcdir)"/niceload.rst --infile="$(srcdir)"/niceload.pod \
|| echo "Warning: pod2rst not found. Using old niceload.rst"
parcat.rst: parcat.pod
pod2rst --outfile "$(srcdir)"/parcat.rst --infile="$(srcdir)"/parcat.pod \
|| echo "Warning: pod2rst not found. Using old parcat.rst"
parset.rst: parset.pod
pod2rst --outfile "$(srcdir)"/parset.rst --infile="$(srcdir)"/parset.pod \
|| echo "Warning: pod2rst not found. Using old parset.rst"
parsort.rst: parsort
pod2rst --outfile "$(srcdir)"/parsort.rst --infile="$(srcdir)"/parsort \
|| echo "Warning: pod2rst not found. Using old parsort.rst"
parallel.pdf: parallel.pod
pod2pdf --output-file "$(srcdir)"/parallel.pdf "$(srcdir)"/parallel.pod --title "GNU Parallel" \
|| echo "Warning: pod2pdf not found. Using old parallel.pdf"
env_parallel.pdf: env_parallel.pod
pod2pdf --output-file "$(srcdir)"/env_parallel.pdf "$(srcdir)"/env_parallel.pod --title "GNU Parallel with environment" \
|| echo "Warning: pod2pdf not found. Using old env_parallel.pdf"
parallel_tutorial.pdf: parallel_tutorial.pod
pod2pdf --output-file "$(srcdir)"/parallel_tutorial.pdf "$(srcdir)"/parallel_tutorial.pod --title "GNU Parallel Tutorial" \
|| echo "Warning: pod2pdf not found. Using old parallel_tutorial.pdf"
parallel_book.pdf: parallel_book.pod
pod2pdf --output-file "$(srcdir)"/parallel_book.pdf "$(srcdir)"/parallel_book.pod --title "GNU Parallel Book" \
|| echo "Warning: pod2pdf not found. Using old parallel_book.pdf"
parallel_design.pdf: parallel_design.pod
pod2pdf --output-file "$(srcdir)"/parallel_design.pdf "$(srcdir)"/parallel_design.pod --title "GNU Parallel Design" \
|| echo "Warning: pod2pdf not found. Using old parallel_design.pdf"
parallel_alternatives.pdf: parallel_alternatives.pod
pod2pdf --output-file "$(srcdir)"/parallel_alternatives.pdf "$(srcdir)"/parallel_alternatives.pod --title "GNU Parallel alternatives" \
|| echo "Warning: pod2pdf not found. Using old parallel_alternatives.pdf"
sem.pdf: sem.pod
pod2pdf --output-file "$(srcdir)"/sem.pdf "$(srcdir)"/sem.pod --title "GNU sem" \
|| echo "Warning: pod2pdf not found. Using old sem.pdf"
sql.pdf: sql
pod2pdf --output-file "$(srcdir)"/sql.pdf "$(srcdir)"/sql --title "GNU SQL" \
|| echo "Warning: pod2pdf not found. Using old sql.pdf"
niceload.pdf: niceload.pod
pod2pdf --output-file "$(srcdir)"/niceload.pdf "$(srcdir)"/niceload.pod --title "GNU niceload" \
|| echo "Warning: pod2pdf not found. Using old niceload.pdf"
parcat.pdf: parcat.pod
pod2pdf --output-file "$(srcdir)"/parcat.pdf "$(srcdir)"/parcat.pod --title "GNU parcat" \
|| echo "Warning: pod2pdf not found. Using old parcat.pdf"
parset.pdf: parset.pod
pod2pdf --output-file "$(srcdir)"/parset.pdf "$(srcdir)"/parset.pod --title "GNU parset" \
|| echo "Warning: pod2pdf not found. Using old parset.pdf"
parsort.pdf: parsort
pod2pdf --output-file "$(srcdir)"/parsort.pdf "$(srcdir)"/parsort --title "GNU parsort" \
|| echo "Warning: pod2pdf not found. Using old parsort.pdf"
parallel_cheat_bw.pdf: parallel_cheat_bw.fodt
libreoffice --headless --convert-to pdf parallel_cheat_bw.fodt \
|| echo "Warning: libreoffice failed. Using old parallel_cheat_bw.pdf"
sem: parallel
ln -fs parallel sem
# Tell versions [3.59,3.63) of GNU make to not export all variables.
# Otherwise a system limit (for SysV at least) may be exceeded.
.NOEXPORT:

src/sphinx/Makefile (new file)

@@ -0,0 +1,34 @@
# Minimal makefile for Sphinx documentation
#
# SPDX-FileCopyrightText: 2002-2021 Ole Tange, http://ole.tange.dk and Free Software Foundation, Inc.
#
# SPDX-License-Identifier: GPL-3.0-or-later

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
SOURCEDIR = source
BUILDDIR = build

all: copy
	make html

copy:
	cp -a ../*.rst source/

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

clean:
	rm -rf "$(BUILDDIR)"
	cp "$(SOURCEDIR)"/sphinx.rst "$(SOURCEDIR)"/sphinx.rst.bak
	rm "$(SOURCEDIR)"/*.rst
	cp "$(SOURCEDIR)"/sphinx.rst.bak "$(SOURCEDIR)"/sphinx.rst

src/sphinx/source/conf.py (new file)

@@ -0,0 +1,179 @@
# SPDX-FileCopyrightText: 2021 Ole Tange, http://ole.tange.dk and Free Software Foundation, Inc.
#
# SPDX-License-Identifier: GPL-3.0-or-later
# -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'GNU Parallel'
copyright = '2021, Ole Tange'
author = 'Ole Tange'
# The short X.Y version
version = ''
# The full version, including alpha/beta/rc tags
release = '20210122'
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'sphinx'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'GNUParalleldoc'
# -- Options for LaTeX output ------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'GNUParallel.tex', 'GNU Parallel Documentation',
'Ole Tange', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'gnuparallel', 'GNU Parallel Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'GNUParallel', 'GNU Parallel Documentation',
author, 'GNUParallel', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
smartquotes = False
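Because html_theme is set to 'sphinx_rtd_theme', that theme must be importable when the build runs. A hedged sketch of building directly against this conf.py, without the wrapper Makefile (package names assume a pip-based setup):

    pip install sphinx sphinx_rtd_theme                            # assumed prerequisites
    sphinx-build -b html src/sphinx/source src/sphinx/build/html   # -b html selects the HTML builder
    # the source directory must already contain conf.py plus the copied *.rst pages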

src/sphinx/source/sphinx.rst (new file)

@@ -0,0 +1,35 @@
.. SPDX-FileCopyrightText: 2021 Ole Tange, http://ole.tange.dk and Free Software Foundation, Inc.
..
.. SPDX-License-Identifier: GPL-3.0-or-later
.. GNU Parallel documentation master file, created by
   sphinx-quickstart on Sat Jan 23 14:06:27 2021.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to GNU Parallel's documentation!
========================================
.. toctree::
   :maxdepth: 3
   :caption: Contents:

   parallel
   sem
   env_parallel
   parset
   parsort
   parallel_design
   parallel_tutorial
   parallel_alternatives
   parcat
   niceload
   sql

Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
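Each toctree entry above names a pod2rst-generated file that the Makefile's copy target pulls into source/. A sketch of refreshing a single page end to end (run from the repository's src/ directory; paths are illustrative):

    pod2rst --outfile parallel.rst --infile=parallel.pod   # regenerate the reST from the POD source
    make -C sphinx                                         # copy *.rst into source/ and rebuild the HTML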