## The purpose of this program is to load the same files that \copy would
## load into a PostgreSQL table, but do so one row at a time rather than in bulk.
## This is useful for demonstrating the difference in loading efficiency between
## bulk and row-by-row loading without having to prepare a different input format for each.
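##
## Usage (the script name here is arbitrary; the connection picks up the usual
## libpq environment variables such as PGDATABASE):
##   perl row_by_row.pl <table_name> <tab_separated_data_file>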

## This makes no attempt to deal with escape sequences like \t and \n the way \copy
## does, so the loaded results will not be identical. That shouldn't matter if it is
## used only for benchmarking and not for actual production loading (and why would you
## use this for that purpose when \copy is available?)

## When loading into a two-column table with no indexes or triggers, Perl takes about
## half the CPU time and postgres the other half.  When loading into a table with
## triggers or indexes, Perl's share becomes mostly immaterial.
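##
## For comparison, the bulk-loading side of the benchmark would be something like
## (table and file names are illustrative):
##   psql -c "\copy mytable from 'data.txt'"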

use strict;
use warnings;
use DBI;
use Time::HiRes qw(time);
my $start=time();
## RaiseError => 1 so a failed query dies instead of just printing a warning
my $dbi=DBI->connect('DBI:Pg:', undef, undef, {RaiseError => 1});
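## look up how many columns the target table has (matched by table_name only,
## so the schema is not taken into account)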
my ($columns) = $dbi->selectrow_array("select count(*) from information_schema.columns where table_name=?", undef, $ARGV[0]);
$columns > 0 or die "no such table '$ARGV[0]'";
## prepare an insert with as many placeholders as the table has columns
my $insert=$dbi->prepare("Insert into $ARGV[0] values (" . (join ',', map '?', 1..$columns) .')');
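## e.g. for a 3-column table named t1 (name illustrative) this prepares:
##   Insert into t1 values (?,?,?)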

open my $fh, "<", $ARGV[1] or die "Couldn't open '$ARGV[1]': $!";
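## wrap all the inserts in one transaction, so the per-row cost being measured is
## the execute itself rather than a commit for every row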
$dbi->begin_work();
while (<$fh>) {
	chomp;
	## the -1 limit keeps trailing empty fields, so the value count still
	## matches the placeholder count when the last column is empty
	my @x = split /\t/, $_, -1;
	$insert->execute(@x);
}
$dbi->commit();
my $stop=time();
## print a timing line in the format psql's \timing would generate
printf "Time: %.3f ms\n", 1000 * ($stop - $start);

