mirror of https://git.savannah.gnu.org/git/parallel.git
Released as 20120220 alpha.
parent 211ffd7898
commit 587c5e23be
NEWS | 34
@@ -1,3 +1,37 @@
+20120222
+
+* --workdir . will use the current working dir. If the current working
+dir is beneath your home dir, the value . is treated as the relative
+path to your home dir. This means that if your home dir is different
+on remote computers (e.g. if your login is different) the relative
+path will still be relative to your home dir.
+
+* A texinfo file is now generated from the POD file using pod2texi.
+
+* The bioinformatics article "Fast large-scale clustering of protein
+structures using Gauss integrals" mentions GNU Parallel. They forgot
+to add the reference. Please remember --bibtex if you use GNU
+Parallel in an article.
+http://bioinformatics.oxfordjournals.org/content/28/4/510.short
+
+* The first supporter received his merchandise. Did you order yours?
+https://www.gnu.org/software/parallel/merchandise.html
+
+* How to use GNU Parallel with samtools.
+http://zvfak.blogspot.com/2012/02/samtools-in-parallel.html
+
+* How to use GNU Parallel for vacuuming PostgreSQL tables.
+http://blog.timmattison.com/archives/2012/01/24/mini-hack-parallel-vacuuming-in-postgresql/
+
+* Converting e-books with Calibre and GNU Parallel.
+http://www.linkhal.de/blog/2012/01/converting-massive-e-book-collections-with-calibre-and-gnu-parrallel/
+
+* Using GNU Parallel for tailing logfiles from multiple servers.
+http://silviud.blogspot.com/2012/02/shell-parallel-processing.html
+
+* Bug fixes and man page updates.
+
+
20120122

* --header : uses the first input line as column names and you can
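The --workdir . entry above is the main new behaviour in this release. A hedged sketch of how it is meant to be used; the server name and the file names are made up for illustration, not taken from the commit:

  # Run from ~/project locally. With --workdir . the job also runs in
  # ~/project on the remote machine, because . is resolved relative to the
  # home dir on each side, even if the absolute home paths differ.
  cd ~/project
  parallel --workdir . -S server.example.com --transfer --return {}.out \
      'wc -l {} > {}.out' ::: data1.txt data2.txt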
configure (vendored) | 20
@@ -1,6 +1,6 @@
#! /bin/sh
# Guess values for system-dependent variables and create Makefiles.
-# Generated by GNU Autoconf 2.68 for parallel 20120124.
+# Generated by GNU Autoconf 2.68 for parallel 20120220.
#
# Report bugs to <bug-parallel@gnu.org>.
#
@@ -559,8 +559,8 @@ MAKEFLAGS=
# Identity of this package.
PACKAGE_NAME='parallel'
PACKAGE_TARNAME='parallel'
-PACKAGE_VERSION='20120124'
-PACKAGE_STRING='parallel 20120124'
+PACKAGE_VERSION='20120220'
+PACKAGE_STRING='parallel 20120220'
PACKAGE_BUGREPORT='bug-parallel@gnu.org'
PACKAGE_URL=''

@@ -1176,7 +1176,7 @@ if test "$ac_init_help" = "long"; then
# Omit some internal or obsolete options to make the list less imposing.
# This message is too long to be a string in the A/UX 3.1 sh.
cat <<_ACEOF
-\`configure' configures parallel 20120124 to adapt to many kinds of systems.
+\`configure' configures parallel 20120220 to adapt to many kinds of systems.

Usage: $0 [OPTION]... [VAR=VALUE]...

@@ -1242,7 +1242,7 @@ fi

if test -n "$ac_init_help"; then
case $ac_init_help in
-short | recursive ) echo "Configuration of parallel 20120124:";;
+short | recursive ) echo "Configuration of parallel 20120220:";;
esac
cat <<\_ACEOF

@@ -1309,7 +1309,7 @@ fi
test -n "$ac_init_help" && exit $ac_status
if $ac_init_version; then
cat <<\_ACEOF
-parallel configure 20120124
+parallel configure 20120220
generated by GNU Autoconf 2.68

Copyright (C) 2010 Free Software Foundation, Inc.
@@ -1326,7 +1326,7 @@ cat >config.log <<_ACEOF
This file contains any messages produced by compilers while
running configure, to aid debugging if configure makes a mistake.

-It was created by parallel $as_me 20120124, which was
+It was created by parallel $as_me 20120220, which was
generated by GNU Autoconf 2.68. Invocation command line was

$ $0 $@
@@ -2141,7 +2141,7 @@ fi

# Define the identity of the package.
PACKAGE='parallel'
-VERSION='20120124'
+VERSION='20120220'


cat >>confdefs.h <<_ACEOF
@@ -2704,7 +2704,7 @@ cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1
# report actual input values of CONFIG_FILES etc. instead of their
# values after options handling.
ac_log="
-This file was extended by parallel $as_me 20120124, which was
+This file was extended by parallel $as_me 20120220, which was
generated by GNU Autoconf 2.68. Invocation command line was

CONFIG_FILES = $CONFIG_FILES
@@ -2766,7 +2766,7 @@ _ACEOF
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1
ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`"
ac_cs_version="\\
-parallel config.status 20120124
+parallel config.status 20120220
configured by $0, generated by GNU Autoconf 2.68,
with options \\"\$ac_cs_config\\"
@@ -1,4 +1,4 @@
-AC_INIT([parallel], [20120124], [bug-parallel@gnu.org])
+AC_INIT([parallel], [20120220], [bug-parallel@gnu.org])
AM_INIT_AUTOMAKE([-Wall -Werror foreign])
AC_CONFIG_HEADERS([config.h])
AC_CONFIG_FILES([
@@ -41,8 +41,6 @@ surprising file names containing space, ', or " in the filename.
xargs can give nasty surprises due to the separator problem
http://pi.dk/5

GNU Parallel http://nd.gd/0s may be better.

@jaylyerly @stevenf xargs will bite you if file names contain
space http://pi.dk/5. Use GNU Parallel instead: http://pi.dk/0

@@ -193,12 +193,21 @@ New in this release:
Parallel in an article.
http://bioinformatics.oxfordjournals.org/content/28/4/510.short

-* First supporter received his merchandise. Did you order yours?
+* The first supporter received his merchandise. Did you order yours?
https://www.gnu.org/software/parallel/merchandise.html

* How to use GNU Parallel with samtools.
http://zvfak.blogspot.com/2012/02/samtools-in-parallel.html

* How to use GNU Parallel for vacuuming PostgreSQL tables.
http://blog.timmattison.com/archives/2012/01/24/mini-hack-parallel-vacuuming-in-postgresql/

* Converting e-books with Calibre and GNU Parallel.
http://www.linkhal.de/blog/2012/01/converting-massive-e-book-collections-with-calibre-and-gnu-parrallel/

* Using GNU Parallel for tailing logfiles from multiple servers.
http://silviud.blogspot.com/2012/02/shell-parallel-processing.html

* Bug fixes and man page updates.


@@ -38,16 +38,20 @@ niceload.html: niceload.pod Makefile sql.html
rm -f $(srcdir)/pod2htm*

parallel.texi: parallel.pod
-pod2texi --output=$(srcdir)/parallel.texi $(srcdir)/parallel.pod
+# If pod2texi is not installed: Forget about it
+pod2texi --output=$(srcdir)/parallel.texi $(srcdir)/parallel.pod || true

sem.texi: sem.pod
-pod2texi --output=$(srcdir)/sem.texi $(srcdir)/sem.pod
+# If pod2texi is not installed: Forget about it
+pod2texi --output=$(srcdir)/sem.texi $(srcdir)/sem.pod || true

sql.texi: sql
-pod2texi --output=$(srcdir)/sql.texi $(srcdir)/sql
+# If pod2texi is not installed: Forget about it
+pod2texi --output=$(srcdir)/sql.texi $(srcdir)/sql || true

niceload.texi: niceload.pod
-pod2texi --output=$(srcdir)/niceload.texi $(srcdir)/niceload.pod
+# If pod2texi is not installed: Forget about it
+pod2texi --output=$(srcdir)/niceload.texi $(srcdir)/niceload.pod || true

sem: parallel
ln -fs parallel sem
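The four new recipe lines above all follow the same pattern: the comment records the intent, and the appended || true keeps a missing pod2texi from aborting the build, since make stops a rule on any non-zero exit status. A stand-alone shell sketch of the same idea, using the file names from the Makefile:

  # Build the texinfo file when pod2texi exists; if it is missing, the shell
  # prints the error and || true forces the overall exit status to 0.
  pod2texi --output=parallel.texi parallel.pod || true

  # An equivalent, more explicit variant of the same optional-tool pattern:
  if command -v pod2texi >/dev/null 2>&1; then
      pod2texi --output=parallel.texi parallel.pod
  fi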
@@ -483,16 +483,20 @@ niceload.html: niceload.pod Makefile sql.html
rm -f $(srcdir)/pod2htm*

parallel.texi: parallel.pod
-pod2texi --output=$(srcdir)/parallel.texi $(srcdir)/parallel.pod
+# If pod2texi is not installed: Forget about it
+pod2texi --output=$(srcdir)/parallel.texi $(srcdir)/parallel.pod || true

sem.texi: sem.pod
-pod2texi --output=$(srcdir)/sem.texi $(srcdir)/sem.pod
+# If pod2texi is not installed: Forget about it
+pod2texi --output=$(srcdir)/sem.texi $(srcdir)/sem.pod || true

sql.texi: sql
-pod2texi --output=$(srcdir)/sql.texi $(srcdir)/sql
+# If pod2texi is not installed: Forget about it
+pod2texi --output=$(srcdir)/sql.texi $(srcdir)/sql || true

niceload.texi: niceload.pod
-pod2texi --output=$(srcdir)/niceload.texi $(srcdir)/niceload.pod
+# If pod2texi is not installed: Forget about it
+pod2texi --output=$(srcdir)/niceload.texi $(srcdir)/niceload.pod || true

sem: parallel
ln -fs parallel sem
@@ -24,7 +24,7 @@
use strict;
use Getopt::Long;
$Global::progname="niceload";
-$Global::version = 20120124;
+$Global::version = 20120220;
Getopt::Long::Configure("bundling","require_order");
get_options_from_array(\@ARGV) || die_usage();
if($::opt_version) {
@@ -543,7 +543,7 @@ sub get_options_from_array {
sub parse_options {
# Returns: N/A
# Defaults:
-$Global::version = 20120215;
+$Global::version = 20120220;
$Global::progname = 'parallel';
$Global::infinity = 2**31;
$Global::debug = 0;
@@ -500,7 +500,7 @@ status will be the exit status from the failing job.
=back


-=item B<--header> I<regexp> (alpha testing)
+=item B<--header> I<regexp> (beta testing)

Use upto regexp as header. For normal usage the matched header
(typically the first line: B<--header '\n'>) will be split using
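A hedged usage sketch of --header as described above; the input file and its column names are invented for the example, and the behaviour assumed is the one the NEWS entry describes (the first input line becomes the column names):

  # table.csv starts with the header line "chrom,start,end".
  # --header : takes that first line as column names, so the columns can be
  # used as replacement strings in the command.
  parallel --header : --colsep ',' -a table.csv echo region {chrom}:{start}-{end}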
@@ -743,9 +743,9 @@ Instead of printing the output to stdout (standard output) the output
of each job is saved in a file and the filename is then printed.


-=item B<--pipe> (beta testing)
+=item B<--pipe>

-=item B<--spreadstdin> (beta testing)
+=item B<--spreadstdin>

Spread input to jobs on stdin (standard input). Read a block of data
from stdin (standard input) and give one block of data as input to one
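A small hedged example of the --pipe behaviour described above; the input file name is a placeholder:

  # Chop stdin into roughly 1 MB blocks and hand one block to each wc -l job,
  # so a large file is counted in parallel chunks.
  cat bigfile.txt | parallel --pipe --block 1M wc -l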
@@ -937,7 +937,7 @@ it to the command.
Only used with B<--pipe>.


-=item B<--resume> (alpha testing)
+=item B<--resume> (beta testing)

Resumes from the last unfinished job. By reading B<--joblog> GNU
B<parallel> will figure out the last unfinished job and continue from
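A hedged sketch of --resume together with --joblog as described above; the log path and the commands are invented:

  # The first run writes one line per finished job to the joblog.
  parallel --joblog /tmp/my.log 'sleep 1; echo {}' ::: 1 2 3 4 5
  # If that run is interrupted, the same command line with --resume reads the
  # joblog, skips the jobs already recorded there and continues with the rest.
  parallel --resume --joblog /tmp/my.log 'sleep 1; echo {}' ::: 1 2 3 4 5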
@@ -1114,9 +1114,9 @@ B<--sshlogin> is often used with B<--transfer>, B<--return>,
B<--cleanup>, and B<--trc>.


-=item B<--sshloginfile> I<filename> (alpha testing)
+=item B<--sshloginfile> I<filename> (beta testing)

-=item B<--slf> I<filename> (alpha testing)
+=item B<--slf> I<filename> (beta testing)

File with sshlogins. The file consists of sshlogins on separate
lines. Empty lines and lines starting with '#' are ignored. Example:
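A hedged sketch of an sshloginfile and how it is passed with --sshloginfile (or --slf); the host names are placeholders:

  # nodes.txt, one sshlogin per line; empty lines and lines starting with #
  # are ignored, as described above:
  #   server.example.com
  #   user@server2.example.com
  # Run the jobs across the listed machines:
  parallel --sshloginfile nodes.txt 'hostname; echo did {}' ::: a b c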
@@ -1196,7 +1196,7 @@ different dir for the files. Setting B<--tmpdir> is equivalent to
setting $TMPDIR.


-=item B<--timeout> I<sec> (beta testing)
+=item B<--timeout> I<sec>

Time out for command. If the command runs for longer than I<sec>
seconds it will get killed with SIGTERM, followed by SIGTERM 200 ms
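A hedged example of --timeout; the values are chosen for illustration only:

  # Jobs running longer than 5 seconds are killed as described above, so the
  # 8-second sleep is terminated while the shorter jobs finish normally.
  parallel --timeout 5 'sleep {}; echo {} finished' ::: 1 3 8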
@@ -1322,9 +1322,9 @@ Use B<-v> B<-v> to print the wrapping ssh command when running remotely.
Print the version GNU B<parallel> and exit.


-=item B<--workdir> I<mydir>
+=item B<--workdir> I<mydir> (alpha testing)

-=item B<--wd> I<mydir>
+=item B<--wd> I<mydir> (alpha testing)

Files transferred using B<--transfer> and B<--return> will be relative
to I<mydir> on remote computers, and the command will be executed in
@@ -531,8 +531,8 @@ status will be the exit status from the failing job.

@end table

-@item @strong{--header} @emph{regexp} (alpha testing)
-@anchor{@strong{--header} @emph{regexp} (alpha testing)}
+@item @strong{--header} @emph{regexp} (beta testing)
+@anchor{@strong{--header} @emph{regexp} (beta testing)}

Use upto regexp as header. For normal usage the matched header
(typically the first line: @strong{--header '\n'}) will be split using
@@ -798,11 +798,11 @@ all the output from one server will be grouped together.
Instead of printing the output to stdout (standard output) the output
of each job is saved in a file and the filename is then printed.

-@item @strong{--pipe} (beta testing)
-@anchor{@strong{--pipe} (beta testing)}
+@item @strong{--pipe}
+@anchor{@strong{--pipe}}

-@item @strong{--spreadstdin} (beta testing)
-@anchor{@strong{--spreadstdin} (beta testing)}
+@item @strong{--spreadstdin}
+@anchor{@strong{--spreadstdin}}

Spread input to jobs on stdin (standard input). Read a block of data
from stdin (standard input) and give one block of data as input to one
@@ -1002,8 +1002,8 @@ it to the command.

Only used with @strong{--pipe}.

-@item @strong{--resume} (alpha testing)
-@anchor{@strong{--resume} (alpha testing)}
+@item @strong{--resume} (beta testing)
+@anchor{@strong{--resume} (beta testing)}

Resumes from the last unfinished job. By reading @strong{--joblog} GNU
@strong{parallel} will figure out the last unfinished job and continue from
@@ -1190,11 +1190,11 @@ The remote host must have GNU @strong{parallel} installed.
@strong{--sshlogin} is often used with @strong{--transfer}, @strong{--return},
@strong{--cleanup}, and @strong{--trc}.

-@item @strong{--sshloginfile} @emph{filename} (alpha testing)
-@anchor{@strong{--sshloginfile} @emph{filename} (alpha testing)}
+@item @strong{--sshloginfile} @emph{filename} (beta testing)
+@anchor{@strong{--sshloginfile} @emph{filename} (beta testing)}

-@item @strong{--slf} @emph{filename} (alpha testing)
-@anchor{@strong{--slf} @emph{filename} (alpha testing)}
+@item @strong{--slf} @emph{filename} (beta testing)
+@anchor{@strong{--slf} @emph{filename} (beta testing)}

File with sshlogins. The file consists of sshlogins on separate
lines. Empty lines and lines starting with '#' are ignored. Example:
@@ -1275,8 +1275,8 @@ into temporary files in /tmp. By setting @strong{--tmpdir} you can use a
different dir for the files. Setting @strong{--tmpdir} is equivalent to
setting $TMPDIR.

-@item @strong{--timeout} @emph{sec} (beta testing)
-@anchor{@strong{--timeout} @emph{sec} (beta testing)}
+@item @strong{--timeout} @emph{sec}
+@anchor{@strong{--timeout} @emph{sec}}

Time out for command. If the command runs for longer than @emph{sec}
seconds it will get killed with SIGTERM, followed by SIGTERM 200 ms
@@ -1413,11 +1413,11 @@ Use @strong{-v} @strong{-v} to print the wrapping ssh command when running remot

Print the version GNU @strong{parallel} and exit.

-@item @strong{--workdir} @emph{mydir}
-@anchor{@strong{--workdir} @emph{mydir}}
+@item @strong{--workdir} @emph{mydir} (alpha testing)
+@anchor{@strong{--workdir} @emph{mydir} (alpha testing)}

-@item @strong{--wd} @emph{mydir}
-@anchor{@strong{--wd} @emph{mydir}}
+@item @strong{--wd} @emph{mydir} (alpha testing)
+@anchor{@strong{--wd} @emph{mydir} (alpha testing)}

Files transferred using @strong{--transfer} and @strong{--return} will be relative
to @emph{mydir} on remote computers, and the command will be executed in
src/sql | 2
@@ -556,7 +556,7 @@ $Global::Initfile && unlink $Global::Initfile;
exit ($err);

sub parse_options {
-$Global::version = 20120124;
+$Global::version = 20120220;
$Global::progname = 'sql';

# This must be done first as this may exec myself
@@ -49,7 +49,7 @@ niceload -H -l 9.9 uptime | grep ':.[1-9][0-9].[0-9][0-9],' || echo OK
niceload --hard -l 9 uptime | grep ':.[1-9][0-9].[0-9][0-9],' || echo OK

echo '### -f and --factor'
-niceload -H -f 0.1 -l6 echo f 0.1 first &
+niceload -H -f 0.01 -l6 echo f 0.1 first &
niceload -H --factor 10 -l6 echo factor 10 last &
wait

@@ -18,7 +18,7 @@ echo '### bug #34422: parallel -X --eta crashes with div by zero'
seq 2 | stdout parallel -X --eta echo

echo '### --timeout on remote machines'
-parallel -j0 --timeout 3 --onall -S localhost,parallel@parallel-server1 'sleep {}; echo slept {}' ::: 1 8 9 ; echo jobs failed: $?
+parallel -j0 --timeout 4 --onall -S localhost,parallel@parallel-server1 'sleep {}; echo slept {}' ::: 1 8 9 ; echo jobs failed: $?

echo '### --pipe without command'
seq -w 10 | stdout parallel --pipe
@@ -85,7 +85,7 @@ echo '### Test -N odd';
seq 1 11 | parallel -j2 -k -N 2 --pipe cat";echo ole;sleep 0.\$PARALLEL_SEQ"

echo '### Test -N even+2';
-seq 1 12 | parallel -j2 -k -N 2 --pipe cat";echo ole;sleep 0.\$PARALLEL_SEQ"
+seq 1 12 | parallel -j2 -k -N 2 --pipe cat";echo ole;sleep 1.\$PARALLEL_SEQ"

echo '### Test --recstart + --recend';
cat /tmp/blocktest | parallel --block 1m -k --recstart 44 --recend "44" -j10 --pipe sort -n |md5sum