100 limit to avoid running into memory issues.

This commit is contained in:
sergiotarxz 2023-08-08 19:59:20 +02:00
parent 67c2407ae4
commit cfc07fab7b

View File

@ -43,18 +43,29 @@ sub MIGRATIONS {
); );
} }
sub _populate_locations($dbh) { sub _populate_locations ($dbh) {
require BurguillosInfo; require BurguillosInfo;
require BurguillosInfo::Tracking; require BurguillosInfo::Tracking;
my $tracking = BurguillosInfo::Tracking->new(BurguillosInfo->new); my $tracking = BurguillosInfo::Tracking->new( BurguillosInfo->new );
my $data = $dbh->selectall_arrayref(<<'EOF', {Slice => {}}); my $page = 0;
while (1) {
my $data = $dbh->selectall_arrayref( <<'EOF', { Slice => {} }, $page );
SELECT uuid, remote_address SELECT uuid, remote_address
FROM requests FROM requests
WHERE date > NOW() - interval '2 months'; WHERE date > NOW() - interval '2 months'
LIMIT 100
OFFSET ?;
EOF EOF
for my $request (@$data) { if (!@$data) {
my ($uuid, $remote_address) = $request->@{'uuid', 'remote_address'}; return;
$tracking->update_country_and_subdivision($dbh, $uuid, $remote_address); }
for my $request (@$data) {
my ( $uuid, $remote_address ) =
$request->@{ 'uuid', 'remote_address' };
$tracking->update_country_and_subdivision( $dbh, $uuid,
$remote_address );
}
$page += 100;
} }
} }
1; 1;