From d6062f7c450c2cde5aa37e1a9a883ffe90622941 Mon Sep 17 00:00:00 2001
From: Robert Stone
Date: Sat, 21 Oct 2017 12:15:08 -0700
Subject: [PATCH] [Perl] emulate Python zip() for Perl (#8192)

* [Perl] emulate Python zip() for Perl

* [Perl] retool zip() uses away from the callback form
---
 .../AI-MXNet/lib/AI/MXNet/AutoGrad.pm         |   8 +-
 perl-package/AI-MXNet/lib/AI/MXNet/Base.pm    |  23 ++--
 .../AI-MXNet/lib/AI/MXNet/Executor/Group.pm   |  74 ++++++------
 .../AI-MXNet/lib/AI/MXNet/Gluon/Block.pm      |  24 ++--
 .../AI-MXNet/lib/AI/MXNet/Gluon/Parameter.pm  |   8 +-
 .../AI-MXNet/lib/AI/MXNet/Gluon/RNN/Cell.pm   |  14 +--
 .../AI-MXNet/lib/AI/MXNet/Gluon/RNN/Layer.pm  |   6 +-
 .../AI-MXNet/lib/AI/MXNet/Gluon/Trainer.pm    |   8 +-
 .../AI-MXNet/lib/AI/MXNet/Gluon/Utils.pm      |   8 +-
 perl-package/AI-MXNet/lib/AI/MXNet/KVStore.pm |   6 +-
 perl-package/AI-MXNet/lib/AI/MXNet/Metric.pm  |  66 +++++------
 perl-package/AI-MXNet/lib/AI/MXNet/Module.pm  |  12 +-
 perl-package/AI-MXNet/lib/AI/MXNet/Monitor.pm |  12 +-
 perl-package/AI-MXNet/lib/AI/MXNet/NDArray.pm |  12 +-
 .../AI-MXNet/lib/AI/MXNet/NDArray/Slice.pm    |  20 ++--
 .../AI-MXNet/lib/AI/MXNet/RNN/Cell.pm         |  18 +--
 perl-package/AI-MXNet/lib/AI/MXNet/Symbol.pm  |   6 +-
 perl-package/AI-MXNet/t/test_autograd.t       |   6 +-
 perl-package/AI-MXNet/t/test_base.t           | 107 ++++++++++++++++++
 perl-package/AI-MXNet/t/test_model_parallel.t |   6 +-
 perl-package/AI-MXNet/t/test_module.t         |   8 +-
 .../AI-MXNet/t/test_multi_device_exec.t       |   6 +-
 perl-package/AI-MXNetCAPI/mxnet.i             |  37 ++++++
 23 files changed, 318 insertions(+), 177 deletions(-)
 create mode 100644 perl-package/AI-MXNet/t/test_base.t

diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/AutoGrad.pm b/perl-package/AI-MXNet/lib/AI/MXNet/AutoGrad.pm
index b49c0b69c52f..221840e300aa 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/AutoGrad.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/AutoGrad.pm
@@ -333,10 +333,10 @@ method grad(
     );
     my @ret;
-    zip(sub {
-        my ($handle, $stype) = @_;
+    for(zip($grad_vars, $grad_stypes)) {
+        my ($handle, $stype) = @$_;
         push @ret, AI::MXNet::NDArray->new(handle => $handle, stype => $stype);
-    }, $grad_vars, $grad_stypes);
+    }

     if(blessed $variables)
     {
         return $ret[0];
@@ -474,4 +474,4 @@ func _parse_head($heads, $head_grads)
     return (\@head_handles, \@hgrad_handles);
 }

-1;
\ No newline at end of file
+1;
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Base.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Base.pm
index a8da8470f574..f748ecbe1f37 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Base.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Base.pm
@@ -120,12 +120,17 @@ use constant GRAD_REQ_MAP => {

 sub zip
 {
-    my ($sub, @arrays) = @_;
-    my $len = @{ $arrays[0] };
-    for (my $i = 0; $i < $len; $i++)
+    if('CODE' eq ref $_[0])
     {
-        $sub->(map { $_->[$i] } @arrays);
+        # continue supporting the callback style
+        my $code = shift;
+        $code->(@$_) for AI::MXNetCAPI::py_zip(map { \@$_ } @_);
+        return;
     }
+    # the map() here may seem like a no-op, but triggers overloading or
+    # whatever else is needed to make array-ish things actually arrays
+    # before entering the low level list builder.
+    return AI::MXNetCAPI::py_zip(map { \@$_ } @_);
 }

 =head2 enumerate
@@ -270,16 +275,14 @@ sub build_param_doc
     $remove_dup //= 1;
     my %param_keys;
     my @param_str;
-    zip(sub {
-        my ($key, $type_info, $desc) = @_;
-        return if exists $param_keys{$key} and $remove_dup;
+    for(zip($arg_names, $arg_types, $arg_descs)) {
+        my ($key, $type_info, $desc) = @$_;
+        next if exists $param_keys{$key} and $remove_dup;
         $param_keys{$key} = 1;
         my $ret = sprintf("%s : %s", $key, $type_info);
         $ret .= "\n ".$desc if length($desc);
         push @param_str, $ret;
-    },
-    $arg_names, $arg_types, $arg_descs
-    );
+    }
     return sprintf("Parameters\n----------\n%s\n", join("\n", @param_str));
 }

diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Executor/Group.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Executor/Group.pm
index 7ac054333c13..acacffde1ee2 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Executor/Group.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Executor/Group.pm
@@ -57,18 +57,18 @@ func _split_input_slice($batch_size, $work_load_list)
 # Load a array ref of arrays into a array ref of arrays specified by slices
 func _load_general($data, $targets, $major_axis)
 {
-    zip(sub {
-        my ($d_src, $d_targets, $axis) = @_;
+    for(zip($data, $targets, $major_axis)) {
+        my ($d_src, $d_targets, $axis) = @$_;
         if(blessed($d_targets) and $d_targets->isa('AI::MXNet::NDarray'))
         {
             $d_src->copyto($d_targets);
         }
         elsif(ref $d_targets eq 'ARRAY' and blessed $d_targets->[0])
         {
-            zip(sub {
-                my ($src, $dst) = @_;
+            for(zip($d_src, $d_targets)) {
+                my ($src, $dst) = @$_;
                 $src->copyto($dst);
-            }, $d_src, $d_targets);
+            }
         }
         else
         {
@@ -124,7 +124,7 @@ func _load_general($data, $targets, $major_axis)
                 }
             }
         }
-    }, $data, $targets, $major_axis);
+    }
 }

 # Load data into sliced arrays
@@ -144,8 +144,8 @@ func _load_label($batch, $targets, $major_axis)
 func _merge_multi_context($outputs, $major_axis)
 {
     my @rets;
-    zip(sub {
-        my ($tensors, $axis) = @_;
+    for(zip($outputs, $major_axis)) {
+        my ($tensors, $axis) = @$_;
         if($axis >= 0)
         {
             if(@$tensors == 1)
             {
@@ -165,7 +165,7 @@ func _merge_multi_context($outputs, $major_axis)
             # first one, without checking they are actually the same
             push @rets, $tensors->[0];
         }
-    }, $outputs, $major_axis);
+    }
     return \@rets;
 }

@@ -353,9 +353,9 @@ method decide_slices(ArrayRef[AI::MXNet::DataDesc] $data_shapes)
 {
     confess("empty data_shapes array") unless @{ $data_shapes } > 0;
     my $major_axis = [map { AI::MXNet::DataDesc->get_batch_axis($_->layout) } @{ $data_shapes }];
-    zip(sub {
-        my ($desc, $axis) = @_;
-        return if($axis == -1);
+    for(zip($data_shapes, $major_axis)) {
+        my ($desc, $axis) = @$_;
+        next if($axis == -1);
         my $batch_size = $desc->shape->[$axis];
         if(defined $self->_p->batch_size)
         {
@@ -370,7 +370,7 @@ method decide_slices(ArrayRef[AI::MXNet::DataDesc] $data_shapes)
             $self->_p->batch_size($batch_size);
             $self->_p->slices(AI::MXNet::Executor::Group::_split_input_slice($self->_p->batch_size, $self->workload));
         }
-    }, $data_shapes, $major_axis);
+    }
     return $major_axis;
 }

@@ -590,16 +590,16 @@ method set_params(HashRef[AI::MXNet::NDArray] $arg_params, HashRef[AI::MXNet::ND
 method get_params(HashRef[AI::MXNet::NDArray] $arg_params, HashRef[AI::MXNet::NDArray] $aux_params)
 {
     my $weight = 0;
-    zip(sub {
-        my ($name, $block) = @_;
+    for(zip($self->param_names, $self->_p->param_arrays)) {
+        my ($name, $block) = @$_;
         my $weight = sum(map { $_->copyto(AI::MXNet::Context->cpu) } @{ $block }) / @{ $block };
         $weight->astype($arg_params->{$name}->dtype)->copyto($arg_params->{$name});
-    }, $self->param_names, $self->_p->param_arrays);
-    zip(sub {
-        my ($name, $block) = @_;
+    }
+    for(zip($self->_p->aux_names, $self->_p->aux_arrays)) {
+        my ($name, $block) = @$_;
         my $weight = sum(map { $_->copyto(AI::MXNet::Context->cpu) } @{ $block }) / @{ $block };
         $weight->astype($aux_params->{$name}->dtype)->copyto($aux_params->{$name});
-    }, $self->_p->aux_names, $self->_p->aux_arrays);
+    }
 }


@@ -668,15 +668,15 @@ method get_output_shapes()
 {
     my @shapes = map { $_->shape } @{ $self->execs->[0]->outputs };
     my @concat_shapes;
-    zip(sub {
-        my ($key, $shape, $axis) = @_;
+    for(zip($self->symbol->list_outputs, \@shapes, $self->_p->output_layouts)) {
+        my ($key, $shape, $axis) = @$_;
         my @the_shape = @{ $shape };
         if($axis >= 0)
         {
             $the_shape[$axis] = $self->_p->batch_size;
         }
         push @concat_shapes, AI::MXNet::DataDesc->new(name => $key, shape => \@the_shape);
-    }, $self->symbol->list_outputs, \@shapes, $self->_p->output_layouts);
+    }
     return \@concat_shapes;
 }

@@ -765,11 +765,11 @@ method backward(Maybe[AI::MXNet::NDArray|ArrayRef[AI::MXNet::NDArray]] $out_grad
 {
     confess('re-bind with for_training=1 to run backward') unless $self->for_training;
     $out_grads //= [];
-    zip(sub {
-        my ($i, $exec, $islice) = @_;
+    for(zip([0..@{ $self->_p->execs }-1], $self->_p->execs, $self->_p->slices)) {
+        my ($i, $exec, $islice) = @$_;
         my @out_grads_slice;
-        zip(sub{
-            my ($grad, $axis) = @_;
+        for(zip($out_grads, $self->_p->output_layouts)) {
+            my ($grad, $axis) = @$_;
             if($axis >= 0)
             {
                 my $og_my_slice = $grad->slice_axis({
@@ -783,9 +783,9 @@ method backward(Maybe[AI::MXNet::NDArray|ArrayRef[AI::MXNet::NDArray]] $out_grad
             {
                 push @out_grads_slice, $grad->copyto($self->contexts->[$i]);
             }
-        }, $out_grads, $self->_p->output_layouts);
+        }
         $exec->backward(\@out_grads_slice);
-    }, [0..@{ $self->_p->execs }-1], $self->_p->execs, $self->_p->slices);
+    }
 }

 =head2 update_metric
@@ -802,11 +802,11 @@ method backward(Maybe[AI::MXNet::NDArray|ArrayRef[AI::MXNet::NDArray]] $out_grad

 method update_metric(AI::MXNet::EvalMetric $eval_metric, ArrayRef[AI::MXNet::NDArray] $labels)
 {
-    zip(sub {
-        my ($texec, $islice) = @_;
+    for(zip($self->_p->execs, $self->_p->slices)) {
+        my ($texec, $islice) = @$_;
         my @labels_slice;
-        zip(sub {
-            my ($label, $axis) = @_;
+        for(zip($labels, $self->_p->label_layouts)) {
+            my ($label, $axis) = @$_;
             if($axis == 0)
             {
                 # slicing NDArray along axis 0 can avoid copying
@@ -825,9 +825,9 @@ method update_metric(AI::MXNet::EvalMetric $eval_metric, ArrayRef[AI::MXNet::NDA
             {
                 push @labels_slice, $label;
             }
-        }, $labels, $self->_p->label_layouts);
+        }
         $eval_metric->update(\@labels_slice, $texec->outputs);
-    }, $self->_p->execs, $self->_p->slices);
+    }
 }

 method _bind_ith_exec(
@@ -874,8 +874,8 @@ method _bind_ith_exec(
 method _sliced_shape(ArrayRef[AI::MXNet::DataDesc] $shapes, Int $i, ArrayRef[Int] $major_axis)
 {
     my @sliced_shapes;
-    zip(sub {
-        my ($desc, $axis) = @_;
+    for(zip($shapes, $major_axis)) {
+        my ($desc, $axis) = @$_;
         my @shape = @{ $desc->shape };
         if($axis >= 0)
         {
@@ -887,7 +887,7 @@ method _sliced_shape(ArrayRef[AI::MXNet::DataDesc] $shapes, Int $i, ArrayRef[Int
             dtype => $desc->dtype,
             layout => $desc->layout
         );
-    }, $shapes, $major_axis);
+    }
     return \@sliced_shapes;
 }

diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Block.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Block.pm
index 982822be5dc8..148df0471f2a 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Block.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Block.pm
@@ -565,21 +565,21 @@ method infer_shape(@args)
     my $args = \@args;
     ($args) = __PACKAGE__->_flatten($args);
     my %in;
-    zip(sub {
-        my ($i, $j) = @_;
+    for(zip($inputs, $args)) {
+        my ($i, $j) = @$_;
         $in{ $i->name } = $j->shape;
-    }, $inputs, $args);
+    }
     my ($arg_shapes, undef, $aux_shapes) = $out->infer_shape(%in);
     my %sdict;
-    zip(sub {
-        my ($i, $j) = @_;
+    for(zip($out->list_arguments(), $arg_shapes)) {
+        my ($i, $j) = @$_;
         $sdict{ $i } = $j;
-    }, $out->list_arguments(), $arg_shapes);
+    }
     my %aux;
-    zip(sub {
-        my ($i, $j) = @_;
+    for(zip($out->list_auxiliary_states(), $aux_shapes)) {
+        my ($i, $j) = @$_;
         $aux{ $i } = $j;
-    }, $out->list_auxiliary_states(), $aux_shapes);
+    }
     %sdict = (%sdict, %aux);
     for my $i ($self->collect_params->values)
     {
@@ -878,10 +878,10 @@ method forward($x, @args)
         assert((Data::Dumper::Dumper($in_fmt) eq Data::Dumper::Dumper($self->_in_format)), "Invalid input format");
         my $ret = $self->_cached_graph->[1]->deepcopy;
         my %in;
-        zip(sub {
-            my ($k, $v) = @_;
+        for(zip($self->_cached_graph->[0], $args)) {
+            my ($k, $v) = @$_;
             $in{$k->name} = $v;
-        }, $self->_cached_graph->[0], $args);
+        }
         $ret->_compose(%in);
         $ret = (__PACKAGE__->_regroup($ret, $self->_out_format))[0];
         if(ref($ret) eq 'ARRAY' and wantarray)
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Parameter.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Parameter.pm
index 0341fd7e6636..d241aa196a96 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Parameter.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Parameter.pm
@@ -194,8 +194,8 @@ method _load_init($data, $ctx)
 {
     if($self->shape)
     {
-        zip(sub {
-            my ($i, $j) = @_;
+        for(zip($self->shape, $data->shape)) {
+            my ($i, $j) = @$_;
             assert(
                 ($i == 0 or $i == $j),
                 sprintf(
@@ -204,7 +204,7 @@ method _load_init($data, $ctx)
                     $self->name, "@{$self->shape}", "@{$data->shape}"
                 )
             );
-        }, $self->shape, $data->shape);
+        }
     }
     if($self->dtype)
     {
@@ -923,4 +923,4 @@ method load(
     }
 }

-1;
\ No newline at end of file
+1;
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/RNN/Cell.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/RNN/Cell.pm
index d2e7db280aaa..a3fb3c51a147 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/RNN/Cell.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/RNN/Cell.pm
@@ -1047,10 +1047,10 @@ method hybrid_forward(GluonClass $F, GluonInput $inputs, GluonInput $states)
     if($p_states != 0)
     {
         my @tmp;
-        zip(sub {
-            my ($new_s, $old_s) = @_;
+        for(zip($next_states, $states)) {
+            my ($new_s, $old_s) = @$_;
             push @tmp, $F->where($mask->($p_states, $new_s), $new_s, $old_s);
-        }, $next_states, $states);
+        }
         $states = \@tmp;
     }
     else
@@ -1109,10 +1109,10 @@ method unroll(Int $length, GluonInput $inputs, Maybe[GluonInput] :$begin_state=,
     else
     {
         my @tmp;
-        zip(sub {
-            my ($i, $j) = @_;
+        for(zip($outputs, $inputs)) {
+            my ($i, $j) = @$_;
             push @tmp, $F->elemwise_add($i, $j);
-        }, $outputs, $inputs);
+        }
         $outputs = \@tmp;
     }
     return ($outputs, $states);
@@ -1222,4 +1222,4 @@ method unroll(Int $length, GluonInput $inputs, Maybe[GluonInput] :$begin_state=,

 __PACKAGE__->register('AI::MXNet::Gluon::RNN');

-1;
\ No newline at end of file
+1;
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/RNN/Layer.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/RNN/Layer.pm
index fa850e62a76a..2b6e8a5bdae4 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/RNN/Layer.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/RNN/Layer.pm
@@ -230,14 +230,14 @@ method forward(GluonInput $inputs, Maybe[GluonInput] $states=)
     {
         $states = [$states];
     }
-    zip(sub {
-        my ($state, $info) = @_;
+    for(zip($states, $self->state_info($batch_size))) {
+        my ($state, $info) = @$_;
         if(Dumper($state->shape) ne Dumper($info->{shape}))
         {
             my @state_shape = @{ $state->shape };
             confess("Invalid recurrent state shape. Expecting @{$info->{shape}}, got @state_shape.");
         }
-    }, $states, $self->state_info($batch_size));
+    }
     if($self->input_size == 0)
     {
         for my $i (0..$self->dir-1)
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Trainer.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Trainer.pm
index 405c6d29aa38..63f521c5c699 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Trainer.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Trainer.pm
@@ -231,14 +231,14 @@ method step(Int $batch_size, Bool $ignore_stale_grad=0)
                 $self->_kv_store->pull($i, out => $param->list_grad, priority => -$i);
             }
         }
-        zip(sub {
-            my ($upd, $arr, $grad) = @_;
+        for(zip($self->_updaters, $param->list_data, $param->list_grad)) {
+            my ($upd, $arr, $grad) = @$_;
             if(not $ignore_stale_grad or $arr->_fresh_grad)
             {
                 $upd->($i, $grad, $arr);
                 $arr->_fresh_grad(0);
            }
-        }, $self->_updaters, $param->list_data, $param->list_grad);
+        }
     }, $self->_params);
 }

@@ -331,4 +331,4 @@ method load_states(Str $fname)
     }
 }

-1;
\ No newline at end of file
+1;
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Utils.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Utils.pm
index eee3cb5a907b..6acb66237195 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Utils.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Gluon/Utils.pm
@@ -163,10 +163,10 @@ method split_and_load(
     }
     my $slices = __PACKAGE__->split_data($data, scalar(@$ctx_list), $batch_axis, $even_split);
     my @ret;
-    zip(sub {
-        my ($i, $ctx) = @_;
+    for(zip($slices, $ctx_list)) {
+        my ($i, $ctx) = @$_;
         push @ret, $i->as_in_context($ctx);
-    }, $slices, $ctx_list);
+    }
     return \@ret;
 }

@@ -277,4 +277,4 @@ func download(Str $url, Maybe[Str] :$path=, Bool :$overwrite=0, Maybe[Str] :$sha
     return $fname
 }

-1;
\ No newline at end of file
+1;
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/KVStore.pm b/perl-package/AI-MXNet/lib/AI/MXNet/KVStore.pm
index 4410eb3d7a7a..84a890dcc908 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/KVStore.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/KVStore.pm
@@ -481,12 +481,12 @@ sub _key_value
         assert(not blessed($vals) and @$keys == @$vals);
         my @c_keys;
         my @c_vals;
-        zip(sub {
-            my ($key, $val) = @_;
+        for(zip($keys, $vals)) {
+            my ($key, $val) = @$_;
             my ($c_key, $c_val) = _key_value($key, $val);
             push @c_keys, @$c_key;
             push @c_vals, @$c_val;
-        }, $keys, $vals);
+        }
         return (\@c_keys, \@c_vals);
     }
 }
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Metric.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Metric.pm
index a6b440be6eb3..3b9345d8baf9 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Metric.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Metric.pm
@@ -241,8 +241,8 @@ has '+name' => (default => 'accuracy');
 method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
-    zip(sub {
-        my ($label, $pred_label) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred_label) = @$_;
         if(join(',', @{$pred_label->shape}) ne join(',', @{$label->shape}))
         {
             $pred_label = AI::MXNet::NDArray->argmax_channel($pred_label);
@@ -251,7 +251,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
         my $sum = ($pred_label->aspdl->flat == $label->aspdl->flat)->sum;
         $self->sum_metric($self->sum_metric + $sum);
         $self->num_inst($self->num_inst + $pred_label->size);
-    }, $labels, $preds);
+    }
 }

 package AI::MXNet::TopKAccuracy;
@@ -274,8 +274,8 @@ sub BUILD
 method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
-    zip(sub {
-        my ($label, $pred_label) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred_label) = @$_;
         confess('Predictions should be no more than 2 dims')
             unless @{ $pred_label->shape } <= 2;
         $pred_label = $pred_label->aspdl->qsorti;
@@ -299,7 +299,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
             }
         }
         $self->num_inst($self->num_inst + $num_samples);
-    }, $labels, $preds);
+    }
 }

 # Calculate the F1 score of a binary classification problem.
@@ -312,16 +312,16 @@ has '+name' => (default => 'f1');
 method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
-    zip(sub {
-        my ($label, $pred_label) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred_label) = @$_;
         AI::MXNet::Metric::check_label_shapes($label, $pred_label);
         $pred_label = $pred_label->aspdl->maximum_ind;
         $label = $label->astype('int32')->aspdl;
         confess("F1 currently only supports binary classification.")
             if $label->uniq->shape->at(0) > 2;
         my ($true_positives, $false_positives, $false_negatives) = (0,0,0);
-        zip(sub{
-            my ($y_pred, $y_true) = @_;
+        for(zip($pred_label->unpdl, $label->unpdl)) {
+            my ($y_pred, $y_true) = @$_;
             if($y_pred == 1 and $y_true == 1)
             {
                 $true_positives += 1;
@@ -334,7 +334,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
             {
                 $false_negatives += 1;
             }
-        }, $pred_label->unpdl, $label->unpdl);
+        }
         my $precision;
         my $recall;
         if($true_positives + $false_positives > 0)
@@ -364,7 +364,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
         }
         $self->sum_metric($self->sum_metric + $f1_score);
         $self->num_inst($self->num_inst + 1);
-    }, $labels, $preds);
+    }
 }

 package AI::MXNet::Perplexity;
@@ -408,8 +408,8 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
     my ($loss, $num) = (0, 0);
-    zip(sub {
-        my ($label, $pred) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred) = @$_;
         my $label_shape = $label->shape;
         my $pred_shape = $pred->shape;
         assert(
@@ -426,7 +426,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
         }
         $loss -= $pred->maximum(1e-10)->log->sum->asscalar;
         $num += $pred->size;
-    }, $labels, $preds);
+    }
     $self->sum_metric($self->sum_metric + $loss);
     $self->num_inst($self->num_inst + $num);
 }
@@ -450,8 +450,8 @@ has '+name' => (default => 'mae');
 method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
-    zip(sub {
-        my ($label, $pred) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred) = @$_;
         $label = $label->aspdl;
         $pred = $pred->aspdl;
         if($label->ndims == 1)
@@ -460,7 +460,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
         }
         $self->sum_metric($self->sum_metric + ($label - $pred)->abs->avg);
         $self->num_inst($self->num_inst + 1);
-    }, $labels, $preds);
+    }
 }

 # Calculate Mean Squared Error loss
@@ -473,8 +473,8 @@ has '+name' => (default => 'mse');
 method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
-    zip(sub {
-        my ($label, $pred) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred) = @$_;
         $label = $label->aspdl;
         $pred = $pred->aspdl;
         if($label->ndims == 1)
@@ -483,7 +483,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
         }
         $self->sum_metric($self->sum_metric + (($label - $pred)**2)->avg);
         $self->num_inst($self->num_inst + 1);
-    }, $labels, $preds);
+    }
 }

 # Calculate Root Mean Squred Error loss
@@ -496,8 +496,8 @@ has '+name' => (default => 'rmse');
 method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
-    zip(sub {
-        my ($label, $pred) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred) = @$_;
         $label = $label->aspdl;
         $pred = $pred->aspdl;
         if($label->ndims == 1)
@@ -506,7 +506,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
         }
         $self->sum_metric($self->sum_metric + sqrt((($label - $pred)**2)->avg));
         $self->num_inst($self->num_inst + 1);
-    }, $labels, $preds);
+    }
 }

 # Calculate Cross Entropy loss
@@ -521,8 +521,8 @@ method python_constructor_arguments() { ['eps'] }
 method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
-    zip(sub {
-        my ($label, $pred) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred) = @$_;
         $label = $label->aspdl->flat;
         $pred = $pred->aspdl;
         my $label_shape = $label->shape->at(0);
@@ -534,7 +534,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
         my $prob = $pred->index($label);
         $self->sum_metric($self->sum_metric + (-($prob + $self->eps)->log)->sum);
         $self->num_inst($self->num_inst + $label_shape);
-    }, $labels, $preds);
+    }
 }

 package AI::MXNet::PearsonCorrelation;
@@ -570,8 +570,8 @@ has '+name' => (default => 'pearson-correlation');
 method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray] $preds)
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds);
-    zip(sub {
-        my ($label, $pred) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred) = @$_;
         AI::MXNet::Metric::check_label_shapes($label, $pred);
         $label = $label->aspdl->flat;
         $pred = $pred->aspdl->flat;
@@ -583,7 +583,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
             ((($label-$label_mean)*($pred-$pred_mean))->sum/$label->nelem)/(($label_stdv*$pred_stdv)->at(0))
         );
         $self->num_inst($self->num_inst + 1);
-    }, $labels, $preds);
+    }
 }

 package AI::MXNet::Loss;
@@ -749,8 +749,8 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
 {
     AI::MXNet::Metric::check_label_shapes($labels, $preds)
         unless $self->allow_extra_outputs;
-    zip(sub {
-        my ($label, $pred) = @_;
+    for(zip($labels, $preds)) {
+        my ($label, $pred) = @$_;
         $label = $label->aspdl;
         $pred = $pred->aspdl;
         my $value = $self->eval_function->($label, $pred);
@@ -758,7 +758,7 @@ method update(ArrayRef[AI::MXNet::NDArray] $labels, ArrayRef[AI::MXNet::NDArray]
         my $num_inst = ref $value ? $value->[1] : 1;
         $self->sum_metric($self->sum_metric + $sum_metric);
         $self->num_inst($self->num_inst + $num_inst);
-    }, $labels, $preds);
+    }
 }

 package AI::MXNet::Metric;
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Module.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Module.pm
index a1aa1b2f9769..3229d22597d0 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Module.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Module.pm
@@ -809,12 +809,12 @@ method forward(
         else
         {
             $new_dshape = [];
-            zip(sub {
-                my ($i, $shape) = @_;
+            for(zip($self->data_shapes, \@new_data_shapes)) {
+                my ($i, $shape) = @$_;
                 push @{ $new_dshape }, AI::MXNet::DataDesc->new(
                     $i->name, $shape, $i->dtype, $i->layout
                 );
-            }, $self->data_shapes, \@new_data_shapes);
+            }
         }
         my $new_lshape;
         if($data_batch->can('provide_label') and $data_batch->provide_label)
@@ -824,12 +824,12 @@ method forward(
         elsif($data_batch->can('label') and $data_batch->label)
         {
             $new_lshape = [];
-            zip(sub {
-                my ($i, $j) = @_;
+            for(zip($self->label_shapes, $data_batch->label)) {
+                my ($i, $j) = @$_;
                 push @{ $new_lshape }, AI::MXNet::DataDesc->new(
                     $i->name, $j->shape, $i->dtype, $i->layout
                 );
-            }, $self->label_shapes, $data_batch->label);
+            }
         }
         $self->reshape(data_shapes => $new_dshape, label_shapes => $new_lshape);
     }
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Monitor.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Monitor.pm
index 993461713cb6..386164112e65 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Monitor.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Monitor.pm
@@ -145,14 +145,14 @@ method toc()
     }
     for my $exe (@{ $self->exes })
     {
-        zip(sub {
-            my ($name, $array) = @_;
+        for(zip($exe->_symbol->list_arguments, $exe->arg_arrays)) {
+            my ($name, $array) = @$_;
             push @{ $self->queue }, [$self->step, $name, $self->stat_func->($array)];
-        }, $exe->_symbol->list_arguments, $exe->arg_arrays);
-        zip(sub {
-            my ($name, $array) = @_;
+        }
+        for(zip($exe->_symbol->list_auxiliary_states, $exe->aux_arrays)) {
+            my ($name, $array) = @$_;
             push @{ $self->queue }, [$self->step, $name, $self->stat_func->($array)];
-        }, $exe->_symbol->list_auxiliary_states, $exe->aux_arrays);
+        }
     }
     $self->activated(0);
     my @res;
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/NDArray.pm b/perl-package/AI-MXNet/lib/AI/MXNet/NDArray.pm
index 7193f526b892..ffee1295d0db 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/NDArray.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/NDArray.pm
@@ -96,12 +96,12 @@ method at(Index @indices)
                 or full crop") if $isize > 1 and $dsize != $isize;

     my $i = 0;
-    zip(sub {
-        my ($idx, $dim_size) = @_;
+    for(zip(\@indices, $shape)) {
+        my ($idx, $dim_size) = @$_;
         confess("Dimension $i mismatch Idx: $idx >= Dim Size: $dim_size")
             if $idx >= $dim_size or ($idx + $dim_size) < 0;
         ++$i;
-    }, \@indices, $shape);
+    }
     $i = 0;
     for my $v (@indices)
     {
@@ -151,8 +151,8 @@ method slice(Slice|AdvancedSlice @slices)
         ++$i;
         ref $_ ? (@$_ == 1 ? [$_->[0], $_->[0]] : $_) : ($_ eq 'X' ? [0, $shape->[$i] - 1] : [$_, $_]);
     } @slices;
-    zip(sub {
-        my ($slice, $dim_size) = @_;
+    for(zip(\@slices, $shape)) {
+        my ($slice, $dim_size) = @$_;
         my ($begin, $end, $stride) = @$slice;
         confess("NDArray does not support slice strides != 1")
             if ($stride//0) > 1;
@@ -160,7 +160,7 @@ method slice(Slice|AdvancedSlice @slices)
             if $begin >= $dim_size or ($begin + $dim_size) < 0;
         confess("Dimension $i mismatch slice end : $end >= Dim Size: $dim_size")
             if $end >= $dim_size or ($end + $dim_size) < 0;
-    }, \@slices, $shape);
+    }
     $i = 0;
     my ($begin, $end) = ([], []);
     for my $s (@slices)
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/NDArray/Slice.pm b/perl-package/AI-MXNet/lib/AI/MXNet/NDArray/Slice.pm
index ea49ac5960a4..1a3ea7e0a460 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/NDArray/Slice.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/NDArray/Slice.pm
@@ -55,12 +55,10 @@ method set(AcceptableInput $value, $reverse=)
 {
     confess("set value must be defined") unless defined $value;
     confess("${\ $self->parent } is not writable") unless $self->parent->writable;
-    my $shape = [];
-    zip(
-        sub { my ($begin, $end) = @_; push @$shape, ($end-$begin); },
-        $self->begin,
-        $self->end
-    );
+    my $shape = [ map {
+        my($begin, $end) = @$_;
+        ($end-$begin);
+    } zip($self->begin, $self->end) ];
     if(ref $value)
     {
         if(blessed($value) and $value->isa('AI::MXNet::NDArray'))
@@ -77,15 +75,11 @@ method set(AcceptableInput $value, $reverse=)
             }
             confess("value $value does not match slice dim sizes [@$shape]")
                 if @{$value->shape} != @$shape;
-            zip(
-                sub {
-                    my ($dsize, $vdsize) = @_;
+            for(zip($shape, $value->shape)) {
+                my ($dsize, $vdsize) = @$_;
                     confess("Slice [@$shape] != $value given as value")
                         if $dsize != $vdsize;
-                },
-                $shape,
-                $value->shape
-            );
+            }
             AI::MXNet::NDArray->_crop_assign(
                 $self->parent, $value,
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/RNN/Cell.pm b/perl-package/AI-MXNet/lib/AI/MXNet/RNN/Cell.pm
index 38db4090556e..f2d8b5369e99 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/RNN/Cell.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/RNN/Cell.pm
@@ -1412,15 +1412,15 @@ method unroll(
         $r_outputs = [reverse(@{ $r_outputs })];
     }
     my $outputs = [];
-    zip(sub {
-        my ($i, $l_o, $r_o) = @_;
+    for(zip([0..@{ $l_outputs }-1], [@{ $l_outputs }], [@{ $r_outputs }])) {
+        my ($i, $l_o, $r_o) = @$_;
         push @$outputs, AI::MXNet::Symbol->Concat(
             $l_o, $r_o, dim=>(1+($merge_outputs?1:0)),
             name => $merge_outputs
                         ? sprintf('%sout', $self->_output_prefix)
                         : sprintf('%st%d', $self->_output_prefix, $i)
         );
-    }, [0..@{ $l_outputs }-1], [@{ $l_outputs }], [@{ $r_outputs }]);
+    }
     if($merge_outputs)
     {
         $outputs = @{ $outputs }[0];
@@ -1907,14 +1907,14 @@ method call(AI::MXNet::Symbol $inputs, SymbolOrArrayOfSymbols $states)
     my @states;
     if($p_states != 0)
     {
-        zip(sub {
-            my ($new_s, $old_s) = @_;
+        for(zip($next_states, $states)) {
+            my ($new_s, $old_s) = @$_;
             push @states, AI::MXNet::Symbol->where(
                 $mask->($p_states, $new_s),
                 $new_s,
                 $old_s
             );
-        }, $next_states, $states);
+        }
     }
     $self->prev_output($output);
     return ($output, @states ? \@states : $next_states);
@@ -1968,11 +1968,11 @@ method unroll(
     else
     {
         my @temp;
-        zip(sub {
-            my ($output_sym, $input_sym) = @_;
+        for(zip([@{ $outputs }], [@{ $inputs }])) {
+            my ($output_sym, $input_sym) = @$_;
             push @temp, AI::MXNet::Symbol->elemwise_add($output_sym, $input_sym,
                     name=>$output_sym->name."_plus_residual");
-        }, [@{ $outputs }], [@{ $inputs }]);
+        }
         $outputs = \@temp;
     }
     return ($outputs, $states);
diff --git a/perl-package/AI-MXNet/lib/AI/MXNet/Symbol.pm b/perl-package/AI-MXNet/lib/AI/MXNet/Symbol.pm
index d35bdaea62cd..8fd885a1d2c8 100644
--- a/perl-package/AI-MXNet/lib/AI/MXNet/Symbol.pm
+++ b/perl-package/AI-MXNet/lib/AI/MXNet/Symbol.pm
@@ -585,8 +585,8 @@ method infer_shape(Maybe[Str|Shape] @args)
         my ($arg_shapes) = $self->_infer_shape_impl(1, @args);
         my $arg_names = $self->list_arguments;
         my @unknowns;
-        zip(sub {
-            my ($name, $shape) = @_;
+        for(zip($arg_names, $arg_shapes)) {
+            my ($name, $shape) = @$_;
             if(not ref $shape or not @$shape or not product(@$shape))
             {
                 if(@unknowns >= 10)
@@ -599,7 +599,7 @@ method infer_shape(Maybe[Str|Shape] @args)
                     push @unknowns, "$name @shape";
                 }
             }
-        }, $arg_names, $arg_shapes);
+        }
         AI::MXNet::Logging->warning(
             "Cannot decide shape for the following arguments "
             ."(0s in shape means unknown dimensions). "
diff --git a/perl-package/AI-MXNet/t/test_autograd.t b/perl-package/AI-MXNet/t/test_autograd.t
index 32225bfd2728..b45d233d79a0 100644
--- a/perl-package/AI-MXNet/t/test_autograd.t
+++ b/perl-package/AI-MXNet/t/test_autograd.t
@@ -37,10 +37,10 @@ sub autograd_assert
     ok(same($output->aspdl, $res->aspdl));
     my $grad_res = $grad_f->(@args);
     ok(@$grad_vals == @$grad_res);
-    zip(sub {
-        my ($a, $b) = @_;
+    for(zip($grad_vals, $grad_res)) {
+        my ($a, $b) = @$_;
         ok(same($a->aspdl, $b->aspdl));
-    }, $grad_vals, $grad_res);
+    }
 }

 sub test_unary_func
diff --git a/perl-package/AI-MXNet/t/test_base.t b/perl-package/AI-MXNet/t/test_base.t
new file mode 100644
index 000000000000..ea0bd0ef98f3
--- /dev/null
+++ b/perl-package/AI-MXNet/t/test_base.t
@@ -0,0 +1,107 @@
+use strict;
+use warnings;
+use Test::More;
+use AI::MXNet qw(mx);
+
+sub test_builtin_zip()
+{
+    is_deeply(
+        [ AI::MXNet::zip([ 0 .. 9 ], [ 10 .. 19 ]) ],
+        [ map { [ $_, 10 + $_ ] } 0 .. 9 ]);
+    is_deeply(
+        [ AI::MXNet::zip([ 0 .. 9 ], [ 10 .. 19 ], [ 20 .. 29 ]) ],
+        [ map { [ $_, 10 + $_, 20 + $_ ] } 0 .. 9 ]);
+    my $over = ListOverload->new(10 .. 19);
+    is_deeply(
+        [ AI::MXNet::zip([ 0 .. 9 ], \@$over) ],
+        [ map { [ $_, 10 + $_ ] } 0 .. 9 ]);
+    my $tied = ListTied->new(10 .. 19);
+    is_deeply(
+        [ AI::MXNet::zip([ 0 .. 9 ], \@$tied) ],
+        [ map { [ $_, 10 + $_ ] } 0 .. 9 ]);
+}
+
+
+test_builtin_zip();
+done_testing();
+
+package ListTied {
+    sub new {
+        my($class, @list) = @_;
+        my @tied;
+        tie @tied, $class, @list;
+        return \@tied;
+    }
+    sub TIEARRAY {
+        my($class, @list) = @_;
+        return bless { list => \@list }, $class;
+    }
+    sub FETCH {
+        my($self, $index) = @_;
+        return $self->{list}[$index];
+    }
+    sub STORE {
+        my($self, $index, $value) = @_;
+        return $self->{list}[$index] = $value;
+    }
+    sub FETCHSIZE {
+        my($self) = @_;
+        return scalar @{$self->{list}};
+    }
+    sub STORESIZE {
+        my($self, $count) = @_;
+        return $self->{list}[$count - 1] //= undef;
+    }
+    sub EXTEND {
+        my($self, $count) = @_;
+        return $self->STORESIZE($count);
+    }
+    sub EXISTS {
+        my($self, $key) = @_;
+        return exists $self->{list}[$key];
+    }
+    sub DELETE {
+        my($self, $key) = @_;
+        return delete $self->{list}[$key];
+    }
+    sub CLEAR {
+        my($self) = @_;
+        return @{$self->{list}} = ();
+    }
+    sub PUSH {
+        my($self, @list) = @_;
+        return push @{$self->{list}}, @list;
+    }
+    sub POP {
+        my($self) = @_;
+        return pop @{$self->{list}};
+    }
+    sub SHIFT {
+        my($self) = @_;
+        return shift @{$self->{list}};
+    }
+    sub UNSHIFT {
+        my($self, @list) = @_;
+        return unshift @{$self->{list}}, @list;
+    }
+    sub SPLICE {
+        my($self, $offset, $length, @list) = @_;
+        return splice @{$self->{list}}, $offset, $length, @list;
+    }
+    sub UNTIE {
+        my($self) = @_;
+    }
+    sub DESTROY {
+        my($self) = @_;
+    }
+}
+
+package ListOverload {
+    use overload '@{}' => \&as_list;
+    sub new {
+        my($class, @list) = @_;
+        return bless { list => \@list }, $class;
+    }
+    sub as_list { return $_[0]{list} }
+}
+
diff --git a/perl-package/AI-MXNet/t/test_model_parallel.t b/perl-package/AI-MXNet/t/test_model_parallel.t
index 6a8aba7aab06..76fe25625be3 100644
--- a/perl-package/AI-MXNet/t/test_model_parallel.t
+++ b/perl-package/AI-MXNet/t/test_model_parallel.t
@@ -65,10 +65,10 @@ sub test_chain
     $out_grad .= 1;
     $exec1->backward([$out_grad]);
     $exec2->backward([$out_grad->copyto($ctx1)]);
-    zip(sub {
-        my ($a, $b) = @_;
+    for(zip($arr_grad, $arr_grad2)) {
+        my ($a, $b) = @$_;
         ok(reldiff($a->aspdl, $b->aspdl) < 1e-6);
-    }, $arr_grad, $arr_grad2);
+    }
 }

 test_chain();
diff --git a/perl-package/AI-MXNet/t/test_module.t b/perl-package/AI-MXNet/t/test_module.t
index 7c5690a68b15..305b232a7222 100644
--- a/perl-package/AI-MXNet/t/test_module.t
+++ b/perl-package/AI-MXNet/t/test_module.t
@@ -148,10 +148,10 @@ sub test_module_states

     $mod->forward($batch);
     my $out2 = $mod->get_outputs(1);
-    zip(sub {
-        my ($x1, $x2) = @_;
+    for(zip($out1, $out2)) {
+        my ($x1, $x2) = @$_;
         ok(not almost_equal($x1->aspdl, $x2->aspdl, 1e-3));
-    }, $out1, $out2);
+    }
 }

 sub test_module_switch_bucket
@@ -619,4 +619,4 @@ test_module_reshape();
 test_save_load();
 test_executor_group();
 test_module_set_params();
-test_forward_reshape();
\ No newline at end of file
+test_forward_reshape();
diff --git a/perl-package/AI-MXNet/t/test_multi_device_exec.t b/perl-package/AI-MXNet/t/test_multi_device_exec.t
index 87ca25778c92..15111a7a5d80 100644
--- a/perl-package/AI-MXNet/t/test_multi_device_exec.t
+++ b/perl-package/AI-MXNet/t/test_multi_device_exec.t
@@ -41,8 +41,8 @@ sub test_ctx_group
         shapes => { data => [1,200] }
     );

-    zip(sub {
-        my ($arr, $name) = @_;
+    for(zip($texec->arg_arrays, $mlp->list_arguments())) {
+        my ($arr, $name) = @$_;
         if(exists $set_stage1{ $name })
         {
             ok($arr->context == $group2ctx->{stage1});
@@ -51,7 +51,7 @@ sub test_ctx_group
         {
             ok($arr->context == $group2ctx->{stage2});
         }
-    }, $texec->arg_arrays, $mlp->list_arguments());
+    }
 }

 test_ctx_group();
diff --git a/perl-package/AI-MXNetCAPI/mxnet.i b/perl-package/AI-MXNetCAPI/mxnet.i
index e466e98b7842..663a0c285f0b 100644
--- a/perl-package/AI-MXNetCAPI/mxnet.i
+++ b/perl-package/AI-MXNetCAPI/mxnet.i
@@ -106,7 +106,44 @@ static void ExecutorMonitor_callback(const char* name, NDArrayHandle handle, voi
 %}

+%{
+
+/* this is an adaptation of Python/bltinmodule.c's builtin_zip() */
+XS(py_zip) {
+  dXSARGS;
+  I32 i;
+  I32 len = -1;
+  AV *l[items];
+
+  for(i = 0; i < items; i++) {
+    AV *av = (AV *)SvRV(ST(i));
+    I32 thislen;
+
+    if(SvTYPE(av) != SVt_PVAV)
+      croak("zip argument#%d must be an array", i);
+    thislen = av_len(av) + 1;
+    if(len < 0 || thislen < len)
+      len = thislen;
+    l[i] = av;
+  }
+  EXTEND(SP, len);
+  for(i = 0; i < len; i++) {
+    I32 j;
+    SV *next[items];
+
+    for(j = 0; j < items; j++) {
+      SV **sv = av_fetch(l[j], i, 0);
+      next[j] = sv ? *sv : &PL_sv_undef;
+    }
+    ST(i) = sv_2mortal(newRV_noinc((SV *)av_make(items, next)));
+  }
+  XSRETURN(len);
+}
+
+%}
+
 %init %{
+  newXS(SWIG_prefix "py_zip", py_zip, (char *)__FILE__);
   /* These SWIG_TypeClientData() calls might break in the future, but
    * %rename should work on these types before that happens. */
   SWIG_TypeClientData(SWIGTYPE_p_MXNDArray, (void *)"NDArrayHandle");