Fixed bug #49785: total_jobs() with --sql*.

The test fails due to bug #49792: {#} is wrong with --sql*.
Ole Tange 2016-12-07 13:12:23 +01:00
parent f12701528a
commit 6837861a29
3 changed files with 55 additions and 31 deletions
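For context, the behaviour being fixed can be reproduced with a one-liner modelled on the new test below (a sketch, not part of the commit: DBURL is a placeholder for any DBURL accepted by --sqlmaster/--sqlandworker, and the expected output also assumes bug #49792, which makes {#} wrong with --sql*, is fixed):

    DBURL=...   # placeholder: a DBURL pointing at a database/table, see --sqlmaster in the man page
    parallel --sqlandworker $DBURL echo {#} of '{=1 $_=total_jobs(); =}' ::: {1..5} ::: {a..e}
    # 5 x 5 = 25 jobs, so each line should read "<job number> of 25", e.g. "1 of 25" ... "25 of 25".
    # With --sql* the total is now taken from the row count of the SQL table instead of from the
    # local argument queue, which does not exist for an --sqlworker.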


@@ -230,11 +230,14 @@ Haiku of the month:
-- Ole Tange
New in this release:
https://www.msi.umn.edu/support/faq/how-can-i-use-gnu-parallel-run-lot-commands-parallel
https://cs.gmu.edu/~ygingold/singleimage/Decomposing%20Images%20into%20Layers%20via%20RGB-space%20Geometry%20(Tan%20et%20al%202016%20TOG)%20small.pdf
https://arxiv.org/pdf/1511.02895v2.pdf
http://www.tecmint.com/linux-image-conversion-tools/
http://cohesity.com/blog/alls-fair-love-distributed-storage/
http://microbiomejournal.biomedcentral.com/articles/10.1186/s40168-016-0208-8
https://giorgos.sealabs.net/taco-bell-parallel-programming.html
* <<Possibly http://link.springer.com/chapter/10.1007%2F978-3-319-22053-6_46>>


@@ -3174,7 +3174,7 @@ sub parse_host_filtering {
} elsif($col[6] eq "127") {
# signal == 127: parallel not installed remote
# Set ncpus and ncores = 1
::warning("Could not figure out ",
::warning("Could not figure out ".
"number of cpus on $host. Using 1.");
$ncores{$host} = 1;
$ncpus{$host} = 1;
@@ -6244,37 +6244,41 @@ sub total_jobs {
::error("--pipe is incompatible with --eta/--bar/--shuf");
::wait_and_exit(255);
}
my $record;
my @arg_records;
my $record_queue = $self->{'commandlinequeue'}{'arg_queue'};
my $start = time;
while($record = $record_queue->get()) {
if(time - $start > 10) {
::warning("Reading ".scalar(@arg_records).
" arguments took longer than 10 seconds.");
$opt::eta && ::warning("Consider removing --eta.");
$opt::bar && ::warning("Consider removing --bar.");
$opt::shuf && ::warning("Consider removing --shuf.");
last;
if($opt::sqlworker) {
$self->{'total_jobs'} = $Global::sql->total_jobs();
} else {
my $record;
my @arg_records;
my $record_queue = $self->{'commandlinequeue'}{'arg_queue'};
my $start = time;
while($record = $record_queue->get()) {
if(time - $start > 10) {
::warning("Reading ".scalar(@arg_records).
" arguments took longer than 10 seconds.");
$opt::eta && ::warning("Consider removing --eta.");
$opt::bar && ::warning("Consider removing --bar.");
$opt::shuf && ::warning("Consider removing --shuf.");
last;
}
push @arg_records, $record;
}
push @arg_records, $record;
}
while($record = $record_queue->get()) {
push @arg_records, $record;
}
if($opt::shuf) {
my $i = @arg_records;
while (--$i) {
my $j = int rand($i+1);
@arg_records[$i,$j] = @arg_records[$j,$i];
while($record = $record_queue->get()) {
push @arg_records, $record;
}
if($opt::shuf) {
my $i = @arg_records;
while (--$i) {
my $j = int rand($i+1);
@arg_records[$i,$j] = @arg_records[$j,$i];
}
}
$record_queue->unget(@arg_records);
$self->{'total_jobs'} =
::ceil((1+$#arg_records+$self->{'this_job_no'})
/ ::max($Global::max_number_of_args,1));
::debug("init","Total jobs: ".$self->{'total_jobs'}.
" (".(1+$#arg_records)."+".$self->{'this_job_no'}.")\n");
}
$record_queue->unget(@arg_records);
$self->{'total_jobs'} =
::ceil((1+$#arg_records+$self->{'this_job_no'})
/ ::max($Global::max_number_of_args,1));
::debug("init","Total jobs: ".$self->{'total_jobs'}.
" (".(1+$#arg_records)."+".$self->{'this_job_no'}.")\n");
}
return $self->{'total_jobs'};
}
@@ -10471,6 +10475,18 @@ sub get_record {
}
}
sub total_jobs {
my $self = shift;
my @retval;
my $table = $self->table();
my $v = $self->get("SELECT count(*) FROM $table;");
if($v->[0]) {
return $v->[0]->[0];
} else {
::die_bug("SQL::total_jobs");
}
}
sub finished {
# Check if there are any jobs left in the SQL table that do not
# have a "real" exitval

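The added SQL::total_jobs() boils down to a single row count over the job table. A purely illustrative way to read back the same number by hand (assuming an SQLite backend; the database file and table name are placeholders, not part of this commit):

    sqlite3 /tmp/paralleljobs.db 'SELECT count(*) FROM jobtable;'   # placeholder file and table names
    # The printed count is the value total_jobs() returns when --sqlworker is in effect.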

@@ -30,8 +30,9 @@ p_wrapper() {
}
p_template() {
(sleep 2; parallel "$@" --sqlworker $DBURL sleep .3\;echo >$T1) &
parallel "$@" --sqlandworker $DBURL sleep .3\;echo ::: {1..5} ::: {a..e} >$T2;
(sleep 2;
parallel --sqlworker $DBURL "$@" sleep .3\;echo >$T1) &
parallel --sqlandworker $DBURL "$@" sleep .3\;echo ::: {1..5} ::: {a..e} >$T2;
}
export -f p_template
@@ -67,6 +68,10 @@ par_sqlandworker_unbuffer() {
p_template -u
}
par_sqlandworker_total_jobs() {
p_template echo {#} of '{=1 $_=total_jobs(); =};'
}
export -f $(compgen -A function | egrep 'p_|par_')
# Tested that -j0 in parallel is fastest (up to 15 jobs)