commit 644ccadbfc
4 changed files with 37 additions and 30 deletions
@@ -2,6 +2,11 @@
 
 # Next Release
 
+# v0.10.9
+
+* Removed sector list from dashboard stats and swapped it for category list
+* Added fix to recurring transaction script
+
 # v0.10.8
 
 * Added yearly recurring payments
@@ -87,11 +87,10 @@ sub run {
        return;
    }
 
-   my $now = DateTime->now();
    my $purchase_time = DateTime->new(
-       year => $now->year,
-       month => $now->month,
-       day => $now->day,
+       year => $datetime->year,
+       month => $datetime->month,
+       day => $datetime->day,
        hour => $start_time_dt->hour,
        minute => $start_time_dt->minute,
        second => $start_time_dt->second,
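For context, the fix above amounts to building the purchase timestamp from the recurrence's own date rather than from DateTime->now(). A minimal standalone sketch, assuming (as the hunk suggests) that $datetime carries the scheduled recurrence date and $start_time_dt the original time of day; the example values are invented:

use strict;
use warnings;
use DateTime;

# Invented example values: the recurrence is scheduled for 2017-07-03 and the
# original purchase happened at 14:30:00.
my $datetime      = DateTime->new( year => 2017, month => 7, day => 3 );
my $start_time_dt = DateTime->new(
    year => 2017, month => 1, day => 2,
    hour => 14, minute => 30, second => 0,
);

# Before the fix the date parts came from DateTime->now(), so a catch-up run on
# a later day stamped the generated purchase with the wrong date. Taking them
# from $datetime keeps the purchase on its scheduled day.
my $purchase_time = DateTime->new(
    year   => $datetime->year,
    month  => $datetime->month,
    day    => $datetime->day,
    hour   => $start_time_dt->hour,
    minute => $start_time_dt->minute,
    second => $start_time_dt->second,
);

print $purchase_time->iso8601, "\n";    # 2017-07-03T14:30:00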
@@ -63,6 +63,8 @@ sub post_customer {
 
    my $entity = $c->stash->{api_user}->entity;
 
+   my $purchase_rs = $entity->purchases;
+
    my $duration_weeks = DateTime::Duration->new( weeks => 7 );
    my $end = DateTime->today;
    my $start_weeks = $end->clone->subtract_duration( $duration_weeks );
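The context lines above define the reporting window, which is simply today minus seven whole weeks. A runnable sketch of just that calculation, with the printed dates being an invented example:

use strict;
use warnings;
use DateTime;
use DateTime::Duration;

my $duration_weeks = DateTime::Duration->new( weeks => 7 );
my $end            = DateTime->today;
my $start_weeks    = $end->clone->subtract_duration( $duration_weeks );

# e.g. "weekly stats cover 2017-05-15 to 2017-07-03"
printf "weekly stats cover %s to %s\n", $start_weeks->ymd, $end->ymd;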
@@ -106,28 +108,32 @@ sub post_customer {
        count => $count,
    };
 
-   my $sectors = { sectors => [], purchases => [] };
+   my $data = { cat_total => {}, categories => {}, essentials => {}, cat_list => {} };
 
-   my $sector_purchase_rs = $entity->purchases->search({},
+   my $category_list = $c->schema->resultset('Category')->as_hash;
+
+   my $category_purchase_rs = $purchase_rs->search({},
        {
-           join => { 'seller' => 'organisation' },
+           join => 'category',
            columns => {
-               sector => "organisation.sector",
-               count => \"COUNT(*)",
+               category_id => "category.category_id",
+               value => { sum => 'value' },
            },
-           group_by => "organisation.sector",
-           order_by => { '-desc' => $c->pg_or_sqlite('count',"COUNT(*)",)},
+           group_by => "category.category_id",
        }
    );
 
-   for ( $sector_purchase_rs->all ) {
-       push @{ $sectors->{ sectors } }, $_->get_column('sector');
-       push @{ $sectors->{ purchases } }, ($_->get_column('count') || 0);
+   my %cat_total_list;
+
+   for ( $category_purchase_rs->all ) {
+       my $category = $_->get_column('category_id') || 0;
+       my $value = ($_->get_column('value') || 0) / 100000;
+
+       $cat_total_list{$category_list->{$category}} += $value;
    }
 
-   my $data = { cat_total => {}, categories => {}, essentials => {} };
-
-   my $purchase_rs = $entity->purchases;
+   my @cat_lists = map { { category => $_, value => $cat_total_list{$_} } } sort keys %cat_total_list;
+   $data->{cat_list} = [ sort { $b->{value} <=> $a->{value} } @cat_lists ];
 
    my $purchase_no_essential_rs = $purchase_rs->search({
        "me.essential" => 1,
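Reduced to plain data structures, the new cat_list step totals spend per category name and then sorts those totals into a largest-first list of { category, value } hashes. A hedged sketch with invented category names and figures (in the controller the totals come from the grouped $category_purchase_rs query, with the stored integer value divided by 100000):

use strict;
use warnings;

# Stand-in for the totals built in the loop above.
my %cat_total_list = (
    Groceries     => 42.5,
    Transport     => 10,
    Uncategorised => 118,
);

my @cat_lists = map { { category => $_, value => $cat_total_list{$_} } } sort keys %cat_total_list;
my @cat_list  = sort { $b->{value} <=> $a->{value} } @cat_lists;

printf "%-15s %s\n", $_->{category}, $_->{value} for @cat_list;
# Uncategorised  118
# Groceries      42.5
# Transport      10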
@@ -163,8 +169,6 @@ sub post_customer {
        }
    );
 
-   my $category_list = $c->schema->resultset('Category')->as_hash;
-
    for my $cat_trans ( $month_transaction_category_rs->all ) {
        my $quantised = $c->db_datetime_parser->parse_datetime($cat_trans->get_column('quantised'));
        my $days = $c->format_iso_date( $quantised ) || 0;
@@ -189,7 +193,6 @@ sub post_customer {
        success => Mojo::JSON->true,
        data => $data,
        weeks => $weeks,
-       sectors => $sectors,
    });
 }
 
@@ -55,10 +55,6 @@ $t->post_ok('/api/stats/customer' => json => {
      sum => 118,
      count => 6,
    })
-   ->json_is('/sectors', {
-     sectors => ['A'],
-     purchases => [118],
-   })
    ->json_is('/data', {
      cat_total => {
        Uncategorised => 810,
@@ -88,6 +84,10 @@ $t->post_ok('/api/stats/customer' => json => {
      cat_total => {
        Uncategorised => 810,
      },
+     cat_list => [{
+       category => "Uncategorised",
+       value => 1180,
+     }],
      essentials => {
        purchase_no_essential_total => 0,
        purchase_no_total => 118,