Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Optimizing competitor KPIs #89

Merged
merged 9 commits into from
Dec 13, 2024
1 change: 0 additions & 1 deletion cowamm/.sqlfluff
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,3 @@ end_time='2024-08-27 00:00:00'
results=final_results_per_solver,cow_surplus_per_batch
cow_budget=30000
number_of_pools=1000
competitor_end_time = 2100-01-01
44 changes: 29 additions & 15 deletions cowamm/kpi/competitors/curve/curve_kpis_4232873.sql
Original file line number Diff line number Diff line change
Expand Up @@ -2,35 +2,49 @@
-- APR is measured as the fees earned per $ invested, over the last 24 hours, projected over 1 year
-- Parameters:
-- {{blockchain}}: The blockchain to query
-- {{competitor_end_time}}: The end time of the time window (end_time - 1 day; end_time), defaults to now()
select
contract_address,
tvl,
fee,
volume,
apr
from (
-- {{start_time}}: The start time of the analysis. date '{{start_time}}' <= evt_block_time < date '{{start_time}}' + 1 day
-- By default, we look at the past full day

with accumulated_kpis as (
select
contract_address,
r.contract_address,
fee,
tvl,
latest_per_pool,
sum(amount_usd) over (partition by contract_address order by latest_per_pool) as volume,
365 * sum(amount_usd * fee / tvl) over (partition by contract_address order by latest_per_pool) as apr
-- The first call to 4232976 gets the tvl after each tx to compute volume/tvl
from "query_4232976(blockchain='{{blockchain}}')" as r
sum(amount_usd) over (partition by r.contract_address order by latest_per_pool desc) as volume,
365 * sum(amount_usd * fee / (reserve0 * p0.price * power(10, -p0.decimals) + reserve1 * p1.price * power(10, -p1.decimals))) over (partition by r.contract_address order by latest_per_pool desc) as apr,
-- New row index so that no rows are lost in the later filtering step
row_number() over (partition by r.contract_address order by r.evt_block_time desc) as latest_per_pool
from "query_4232976(blockchain='{{blockchain}}', number_of_pools = '{{number_of_pools}}', start_time = '{{start_time}}')" as r
left join
( --noqa: ST05
select *
from curve.trades
where
block_time >= date_add('day', -1, (case when '{{competitor_end_time}}' = '2100-01-01' then now() else timestamp '{{competitor_end_time}}' end))
block_time >= least(date('{{start_time}}'), date_add('day', -1, date(now())))
) as t
on
r.contract_address = t.project_contract_address
and r.tx_hash = t.tx_hash
inner join prices.minute as p0
on
r.token0 = p0.contract_address
and date_trunc('minute', r.evt_block_time) = p0.timestamp
inner join prices.minute as p1
on
r.token1 = p1.contract_address
and date_trunc('minute', r.evt_block_time) = p1.timestamp
where
-- This test avoids any possible issue with reconstructing the reserves of the pool
tvl > 0
and p0.timestamp between least(date('{{start_time}}'), date_add('day', -1, date(now()))) and least(date_add('day', 1, date('{{start_time}}')), date(now()))
and p1.timestamp between least(date('{{start_time}}'), date_add('day', -1, date(now()))) and least(date_add('day', 1, date('{{start_time}}')), date(now()))
)

select
contract_address,
fee,
tvl,
volume,
apr
from accumulated_kpis
where latest_per_pool = 1
40 changes: 19 additions & 21 deletions cowamm/kpi/competitors/curve/curve_largest_2token_pools_4232976.sql
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@
-- Parameters:
-- {{blockchain}}: The blockchain to query
-- {{number_of_pools}}: The number of largest pools to return
-- {{start_time}}: The start time of the analysis. date '{{start_time}}' <= evt_block_time < date '{{start_time}}' + 1 day
-- By default, we look at the past full day

with
-- filters pools with 2 tokens
Expand Down Expand Up @@ -54,46 +56,42 @@ reserves as (
token0,
token1,
tx_hash,
block_time,
block_time as evt_block_time,
fee,
sum(transfer0) over (partition by contract_address order by block_time, evt_index) as reserve0,
sum(transfer1) over (partition by contract_address order by block_time, evt_index) as reserve1,
row_number() over (partition by tx_hash, contract_address order by evt_index desc) as latest_per_tx,
row_number() over (partition by contract_address order by block_time desc) as latest_per_pool
row_number() over (partition by contract_address order by block_time desc, evt_index desc) as latest_per_pool
from transfers
where block_time <= least(date_add('day', 1, date('{{start_time}}')), date(now()))
),

-- finds the TVL of the pools
recent_tvl as (
latest_tvl as (
select
r.contract_address,
token0,
token1,
block_time,
tx_hash,
reserve0,
reserve1,
fee,
latest_per_pool,
(reserve0 * p0.price / pow(10, p0.decimals)) + (reserve1 * p1.price / pow(10, p1.decimals)) as tvl
from reserves as r
inner join prices.minute as p0
-- using the daily price to get a better representation of the TVL over the 24-hour period
inner join prices.day as p0
on
date_trunc('minute', block_time) = p0.timestamp
p0.timestamp = least(date('{{start_time}}'), date_add('day', -1, date(now())))
and token0 = p0.contract_address
inner join prices.minute as p1
inner join prices.day as p1
on
date_trunc('minute', block_time) = p1.timestamp
p1.timestamp = least(date('{{start_time}}'), date_add('day', -1, date(now())))
and token1 = p1.contract_address
where latest_per_tx = 1
)


select * from recent_tvl
where contract_address in (
select contract_address
from recent_tvl
where latest_per_pool = 1
order by tvl desc
limit {{number_of_pools}}
)

select
reserves.*,
tvl
from reserves
inner join latest_tvl
on reserves.contract_address = latest_tvl.contract_address
where latest_per_tx = 1
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,8 @@
-- Parameters:
-- {{blockchain}}: The blockchain to query
-- {{number_of_pools}}: The number of largest pools to return
-- {{competitor_end_time}}: The end time of the time window (end_time - 1 day; end_time), defaults to now()
-- {{start_time}}: The start time of the analysis. date '{{start_time}}' <= evt_block_time < date '{{start_time}}' + 1 day
-- By default, we look at the past full day

-- select the pool with the largest latest k
with pool as (
Expand All @@ -13,7 +14,7 @@ with pool as (
token0,
token1,
tvl
from "query_4303563(blockchain='{{blockchain}}', number_of_pools = '{{number_of_pools}}')"
from "query_4303563(blockchain='{{blockchain}}', number_of_pools = '{{number_of_pools}}', start_time = '{{start_time}}')"
),

syncs as (
Expand All @@ -30,7 +31,7 @@ syncs as (
inner join pool
on logs.contract_address = pool.contract_address
where
block_time >= date_add('day', -1, (case when '{{competitor_end_time}}' = '2100-01-01' then now() else timestamp '{{competitor_end_time}}' end))
block_time between least(date('{{start_time}}'), date_add('day', -1, date(now()))) and least(date_add('day', 1, date('{{start_time}}')), date(now()))
and topic0 = 0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1 -- Sync
),

Expand All @@ -47,7 +48,7 @@ swaps as (
varbinary_to_uint256(substr(data, 97, 32)) as amount1Out
from {{blockchain}}.logs
where
block_time >= date(date_add('day', -1, now()))
block_time between least(date('{{start_time}}'), date_add('day', -1, date(now()))) and least(date_add('day', 1, date('{{start_time}}')), date(now()))
and topic0 = 0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822 -- Swap
and contract_address in (select contract_address from pool)
),
Expand Down Expand Up @@ -77,6 +78,9 @@ tvl_volume_per_swap as (
on
date_trunc('minute', syncs.evt_block_time) = p1.timestamp
and syncs.token1 = p1.contract_address
where
p0.timestamp between least(date('{{start_time}}'), date_add('day', -1, date(now()))) and least(date_add('day', 1, date('{{start_time}}')), date(now()))
and p1.timestamp between least(date('{{start_time}}'), date_add('day', -1, date(now()))) and least(date_add('day', 1, date('{{start_time}}')), date(now()))
)

select
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
-- Parameters:
-- {{blockchain}}: The blockchain to query
-- {{number_of_pools}}: The number of largest pools to return
-- {{start_time}}: The start time of the analysis. date '{{start_time}}' <= evt_block_time < date '{{start_time}}' + 1 day
-- By default, we look at the past full day

with pools as (
select
Expand Down Expand Up @@ -54,7 +56,7 @@ syncs as (
date_trunc('minute', block_time) as evt_block_time,
varbinary_to_uint256(substr(data, 1, 32)) as reserve0,
varbinary_to_uint256(substr(data, 33, 32)) as reserve1,
rank() over (partition by (logs.contract_address) order by block_time desc, index asc) as latest
rank() over (partition by (logs.contract_address) order by block_time desc, index desc) as latest
from {{blockchain}}.logs
inner join pools
on logs.contract_address = pools.contract_address
Expand All @@ -72,14 +74,13 @@ select distinct
evt_block_time,
reserve0 * p0.price * power(10, -p0.decimals) + reserve1 * p1.price * power(10, -p1.decimals) as tvl
from syncs as s
inner join prices.minute as p0
on
token0 = p0.contract_address
and p0.timestamp = evt_block_time
inner join prices.minute as p1
on
token1 = p1.contract_address
and p1.timestamp = evt_block_time
where latest = 1
inner join prices.day as p0
on token0 = p0.contract_address
inner join prices.day as p1
on token1 = p1.contract_address
where
latest = 1
and p0.timestamp = least(date('{{start_time}}'), date_add('day', -1, date(now())))
and p1.timestamp = least(date('{{start_time}}'), date_add('day', -1, date(now())))
order by tvl desc
limit {{number_of_pools}}
Loading