-- Performance test for aggregate execution over postgres_fdw.
-- Builds a 1M-row local table, exposes it through a loopback foreign
-- server, then times a list of aggregate queries against the foreign
-- table and records the statistics in table "perf".

-- Clean up leftovers from a previous run.  IF EXISTS keeps the script
-- idempotent: without it every DROP errors on a first/clean execution.
drop foreign table if exists fperf1;
drop user mapping if exists for current_user server srv;
drop server if exists srv;
drop table if exists perf1;
drop extension if exists postgres_fdw;

create extension postgres_fdw;

create table perf1(
    c1 int primary key,
    c2 int not null,
    c3 int,
    c4 text,
    c5 timestamptz,
    c6 char(10)
);

-- 1M rows; columns have differing group cardinalities (c2: 3 groups,
-- c3: 5, c4: 10, c5: 100, c6: 1000) to exercise grouped aggregates.
insert into perf1
select id,
       id % 3,
       id % 5,
       to_char(id%10, 'FM0000000'),
       '1970-01-01'::timestamptz + ((id % 100) || ' days')::interval,
       id % 1000
from generate_series(1, 1000000) id;

-- Loopback server pointing at the current database/port so foreign
-- scans go through the FDW but hit this same cluster.  The DO block is
-- needed because server options must be literals, not expressions.
do $d$
begin
    execute $$create server srv foreign data wrapper postgres_fdw
        options (dbname '$$||current_database()||$$',
                 port '$$||current_setting('port')||$$' )$$;
end;
$d$;

create user mapping for current_user server srv;

create foreign table fperf1(
    c1 int,
    c2 int,
    c3 int,
    c4 text,
    c5 timestamptz,
    c6 char(10)
) server srv options (table_name 'perf1');

analyze perf1;

-- This part is taken from Ashutosh Bapat's script posted for
-- partitionwise join and modified per my requirement.

-- Table to collect data: one row per tested query with timing stats.
drop table if exists perf;
create table perf(
    query text,
    rows bigint,
    avg_exe_time float,
    std_dev_exe_time float,
    min_exe_time float,
    max_exe_time float
);

drop function if exists query_execution_stats(num_samples int);

-- query_execution_stats(num_samples):
--   For every row in "querytab", warm the cache (num_samples/5 plain
--   executions), then run the query num_samples times under
--   EXPLAIN (analyze, format json) and insert the avg/stddev/min/max
--   "Execution Time" plus the plan's "Actual Rows" into "perf".
--   Returns void; timings are in milliseconds as reported by EXPLAIN.
create function query_execution_stats(num_samples int)
RETURNS void
LANGUAGE plpgsql
AS $$
DECLARE
    q record;
    plan json;
    e json;
    avg_exe_time float;
    std_dev_exe_time float;
    min_exe_time float;
    max_exe_time float;
BEGIN
    -- Drop any temp table left behind by a previous, aborted call in
    -- this session so the CREATE below cannot fail.
    DROP TABLE IF EXISTS query_exe_times;
    CREATE TEMPORARY TABLE query_exe_times(exe_time float);

    FOR q IN select query from querytab LOOP
        -- Execute query a few times to warm the cache
        FOR i IN 1 .. num_samples/5 LOOP
            EXECUTE q.query;
        END LOOP;

        TRUNCATE query_exe_times;
        RAISE NOTICE 'Perf testing query: %', q.query;

        -- One verbose run to capture the plan (row counts, remote SQL).
        EXECUTE 'EXPLAIN (analyze, verbose, format json) ' || q.query INTO e;
        -- RAISE NOTICE 'Remote SQL: %', e->0->'Plan'->'Remote SQL';

        -- Timed runs: pull "Execution Time" out of the JSON plan.
        FOR i IN 1 .. num_samples LOOP
            EXECUTE 'EXPLAIN (analyze, format json) ' || q.query INTO plan;
            INSERT INTO query_exe_times
                VALUES ((plan->0->'Execution Time')::text::float);
            -- RAISE NOTICE '    completed % samples', i;
        END LOOP;

        SELECT avg(exe_time), stddev(exe_time), min(exe_time), max(exe_time)
          INTO avg_exe_time, std_dev_exe_time, min_exe_time, max_exe_time
          FROM query_exe_times;

        INSERT INTO perf
            VALUES (q.query,
                    (e->0->'Plan'->'Actual Rows')::text::bigint,
                    avg_exe_time,
                    std_dev_exe_time,
                    min_exe_time,
                    max_exe_time);
    END LOOP;

    DROP TABLE query_exe_times;
END;
$$;

--Run queries now
drop table if exists querytab;
create table querytab(query text);

-- Simple count(*) on large table
insert into querytab values ('select count(*) from fperf1');
insert into querytab values ('select c2, avg(c1) from fperf1 group by c2 having count(*) < 333334');
insert into querytab values ('select c2, sum(c1) from fperf1 group by c2');
insert into querytab values ('select c3, avg(c1), sum(c2) from fperf1 group by c3');
insert into querytab values ('select c4, avg(c1), sum(c2) from fperf1 group by c4');
insert into querytab values ('select c5, avg(c1), sum(c2) from fperf1 group by c5');
insert into querytab values ('select c6, avg(c1), sum(c2) from fperf1 group by c6');
-- Grouping on c1%N sweeps group counts from 1 up to 1M (one group per row).
insert into querytab values ('select c1%1, avg(c1), sum(c2) from fperf1 group by c1%1');
insert into querytab values ('select c1%10, avg(c1), sum(c2) from fperf1 group by c1%10');
insert into querytab values ('select c1%100, avg(c1), sum(c2) from fperf1 group by c1%100');
insert into querytab values ('select c1%1000, avg(c1), sum(c2) from fperf1 group by c1%1000');
insert into querytab values ('select c1%10000, avg(c1), sum(c2) from fperf1 group by c1%10000');
insert into querytab values ('select c1%100000, avg(c1), sum(c2) from fperf1 group by c1%100000');
insert into querytab values ('select c1%1000000, avg(c1), sum(c2) from fperf1 group by c1%1000000');

\set num_samples 10
select query_execution_stats(:num_samples);
select * from perf;