
Commit

Merge pull request #2055 from nervosnetwork/testnet
Deploy to mainnet
rabbitz committed Jul 15, 2024
2 parents 417476c + 36b14f5 commit d440e8d
Showing 3 changed files with 65 additions and 44 deletions.
2 changes: 1 addition & 1 deletion app/controllers/api/v2/bitcoin_addresses_controller.rb
@@ -39,7 +39,7 @@ def rgb_cells
vouts = BitcoinVout.where(bitcoin_transaction_id: vout.bitcoin_transaction_id, index: vout.index).includes(:cell_output).where(
cell_outputs: { status: "live" },
)
hash[[tx.tx_hash, vout.index]] = vouts.map { |v| CellOutputSerializer.new(v.cell_output).serializable_hash }
hash[[tx.txid, vout.index]] = vouts.map { |v| CellOutputSerializer.new(v.cell_output).serializable_hash }
end

render json: { data: { rgb_cells: cells }, meta: { total: bitcoin_vouts.total_count, page_size: @page_size } }
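Note: the only change in this controller swaps the map key from tx.tx_hash to tx.txid, so the serialized live cells are grouped by the Bitcoin transaction id plus the vout index. A minimal, standalone sketch of that composite-key lookup (the txid and capacities below are made up):

# Illustrative only: live cell outputs grouped under [bitcoin_txid, vout_index].
txid = "hypothetical-bitcoin-txid"
rgb_cells = {
  [txid, 0] => [{ capacity: "6100000000", status: "live" }],
  [txid, 1] => [{ capacity: "9900000000", status: "live" }],
}
rgb_cells[[txid, 0]] # => [{ capacity: "6100000000", status: "live" }]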
73 changes: 45 additions & 28 deletions app/jobs/csv_exportable/export_contract_transactions_job.rb
@@ -3,31 +3,49 @@
module CsvExportable
class ExportContractTransactionsJob < BaseExporter
def perform(args)
dao_contract = DaoContract.default_contract
ckb_transactions = dao_contract.ckb_transactions.includes(dao_events: [:address]).tx_committed
start_date, end_date = extract_dates(args)
transaction_rows = fetch_transaction_rows(start_date, end_date)
header = [
"Txn hash", "Address", "Blockno", "UnixTimestamp", "Method",
"Amount", "Token", "TxnFee(CKB)", "date(UTC)"
]
generate_csv(header, transaction_rows)
end

if args[:start_date].present?
start_date = BigDecimal(args[:start_date])
ckb_transactions = ckb_transactions.where("ckb_transactions.block_timestamp >= ?", start_date)
end
private

if args[:end_date].present?
end_date = BigDecimal(args[:end_date])
ckb_transactions = ckb_transactions.where("ckb_transactions.block_timestamp <= ?", end_date)
end
def extract_dates(args)
start_date = args[:start_date].present? ? BigDecimal(args[:start_date]) : nil
end_date = args[:end_date].present? ? BigDecimal(args[:end_date]) : nil
start_number = args[:start_number].presence
end_number = args[:end_number].presence

if args[:start_number].present?
ckb_transactions = ckb_transactions.where("ckb_transactions.block_number >= ?", args[:start_number])
if start_number.present?
start_date = Block.find_by(number: start_number)&.timestamp
end

if args[:end_number].present?
ckb_transactions = ckb_transactions.where("ckb_transactions.block_number <= ?", args[:end_number])
if end_number.present?
end_date = Block.find_by(number: end_number)&.timestamp
end

ckb_transactions = ckb_transactions.order("ckb_transactions.block_timestamp desc nulls last, ckb_transactions.id desc").limit(5000)
[start_date, end_date]
end
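
As a worked illustration of extract_dates (argument values are invented): a block number, when present, overrides the corresponding raw timestamp argument by resolving to that block's on-chain timestamp.

# Hypothetical arguments: both start_date and start_number are supplied.
args = { start_date: "1718409600000", start_number: 13_000_000 }
start_date, end_date = extract_dates(args)
# start_date is first parsed as BigDecimal("1718409600000"), then overwritten with
# Block.find_by(number: 13_000_000)&.timestamp (nil if that block does not exist);
# end_date stays nil because neither end argument was given.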

def build_sql_query(start_date, end_date)
sql = "".dup
sql << "block_timestamp >= #{start_date}" if start_date.present?
sql << " AND " if start_date.present? && end_date.present?
sql << "block_timestamp <= #{end_date}" if end_date.present?
sql
end
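
For reference, the fragment this builds is plain SQL text that the callers hand to where; with invented millisecond timestamps it looks like this:

# Hypothetical bounds, expressed as millisecond Unix timestamps.
build_sql_query(1718409600000, 1721001600000)
# => "block_timestamp >= 1718409600000 AND block_timestamp <= 1721001600000"
build_sql_query(nil, 1721001600000)
# => "block_timestamp <= 1721001600000"
build_sql_query(nil, nil)
# => ""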

def fetch_transaction_rows(start_date, end_date)
sql = build_sql_query(start_date, end_date)
rows = []
ckb_transactions.find_in_batches(batch_size: 1000) do |transactions|

dao_contract = DaoContract.default_contract
dao_contract.ckb_transactions.includes(dao_events: [:address]).tx_committed.where(sql).
order("block_timestamp desc nulls last, id desc").limit(5000).find_in_batches(batch_size: 500) do |transactions|
transactions.each do |transaction|
row = generate_row(transaction)
next if row.blank?
@@ -36,27 +54,18 @@ def perform(args)
end
end

header = [
"Txn hash", "Address", "Blockno", "UnixTimestamp", "Method",
"Amount", "Token", "TxnFee(CKB)", "date(UTC)"
]

generate_csv(header, rows)
rows
end

def generate_row(transaction)
dao_events = transaction.dao_events
dao_events = transaction.dao_events.where(event_type: ["deposit_to_dao", "withdraw_from_dao", "issue_interest"])

rows = []
dao_events.each do |dao_event|
datetime = datetime_utc(transaction.block_timestamp)
fee = parse_transaction_fee(transaction.transaction_fee)
amount = CkbUtils.shannon_to_byte(BigDecimal(dao_event.value))
method = {
"deposit_to_dao" => "Deposit",
"withdraw_from_dao" => "Withdraw Request",
"issue_interest" => "Withdraw Finalization",
}[dao_event.event_type]
method = map_event_type(dao_event.event_type)

rows << [
transaction.tx_hash,
@@ -73,5 +82,13 @@ def generate_row(transaction)

rows
end

def map_event_type(event_type)
{
"deposit_to_dao" => "Deposit",
"withdraw_from_dao" => "Withdraw Request",
"issue_interest" => "Withdraw Finalization",
}[event_type]
end
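
The helper is a plain lookup from DAO event types to the labels used in the CSV's Method column; unknown types fall through to nil:

map_event_type("deposit_to_dao")     # => "Deposit"
map_event_type("withdraw_from_dao")  # => "Withdraw Request"
map_event_type("some_other_event")   # => nil (no fallback label)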
end
end
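
A hypothetical invocation of the exporter, assuming BaseExporter ultimately behaves like an ActiveJob and that dates are millisecond Unix timestamps passed as strings (both assumptions, not shown in this diff):

# Hypothetical call; the hash keys mirror the ones read in #perform / #extract_dates.
CsvExportable::ExportContractTransactionsJob.perform_now(
  { start_date: "1718409600000", end_date: "1721001600000" }
)
# Or bound the export by block height instead (values are also hypothetical):
CsvExportable::ExportContractTransactionsJob.perform_now(
  { start_number: 13_000_000, end_number: 13_100_000 }
)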
34 changes: 19 additions & 15 deletions app/jobs/csv_exportable/export_dao_depositors_job.rb
@@ -4,11 +4,12 @@ module CsvExportable
class ExportDaoDepositorsJob < BaseExporter
def perform(args)
start_date, end_date = extract_dates(args)
deposit_rows = fetch_deposit_rows(start_date, end_date)
withdrawing_rows = fetch_withdrawing_rows(start_date, end_date)
combined_rows = combine_rows(deposit_rows, withdrawing_rows)

rows = fetch_deposit_rows(start_date, end_date) + fetch_withdrawing_rows(start_date, end_date)

header = ["Address", "Capacity", "Txn hash", "Previous Txn hash", "UnixTimestamp", "date(UTC)"]
generate_csv(header, rows)
header = ["Address", "Capacity"]
generate_csv(header, combined_rows)
end

private
@@ -40,13 +41,13 @@ def build_sql_query(start_date, end_date)

def fetch_deposit_rows(start_date, end_date)
sql = build_sql_query(start_date, end_date)
rows = []
rows = {}

CellOutput.includes(:address).live.nervos_dao_deposit.where(sql).find_in_batches(batch_size: 500) do |cells|
cells.each do |cell|
address_hash = cell.address_hash
amount = CkbUtils.shannon_to_byte(BigDecimal(cell.capacity))
datetime = datetime_utc(cell.block_timestamp)
rows << [cell.address_hash, amount, cell.tx_hash, nil, cell.block_timestamp, datetime]
rows[address_hash] = rows.fetch(address_hash, 0) + amount
end
end

@@ -55,20 +56,23 @@ def fetch_deposit_rows(start_date, end_date)

def fetch_withdrawing_rows(start_date, end_date)
sql = build_sql_query(start_date, end_date)
rows = []
rows = {}

CellOutput.includes(:address).live.nervos_dao_withdrawing.where(sql).find_in_batches(batch_size: 500) do |cells|
ckb_transaction_ids = CellOutput.live.nervos_dao_withdrawing.where(sql).distinct.pluck(:ckb_transaction_id)
CellOutput.nervos_dao_deposit.includes(:address).where(consumed_by_id: ckb_transaction_ids).find_in_batches(batch_size: 500) do |cells|
cells.each do |cell|
cell_input = cell.ckb_transaction.cell_inputs.nervos_dao_deposit.first
previous_cell_output = cell_input.previous_cell_output
previous_tx_hash = previous_cell_output.tx_hash
amount = CkbUtils.shannon_to_byte(BigDecimal(previous_cell_output.capacity))
datetime = datetime_utc(previous_cell_output.block_timestamp)
rows << [previous_cell_output.address_hash, amount, cell.tx_hash, previous_tx_hash, previous_cell_output.block_timestamp, datetime]
address_hash = cell.address_hash
amount = CkbUtils.shannon_to_byte(BigDecimal(cell.capacity))
rows[address_hash] = rows.fetch(address_hash, 0) + amount
end
end

rows
end
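
Both fetchers accumulate per-address totals with Hash#fetch and a default of 0; in isolation the pattern looks like this (addresses and amounts invented):

rows = {}
[["addr_a", 300.0], ["addr_b", 120.5], ["addr_a", 79.5]].each do |address_hash, amount|
  rows[address_hash] = rows.fetch(address_hash, 0) + amount
end
rows # => {"addr_a"=>379.5, "addr_b"=>120.5}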

def combine_rows(deposit_rows, withdrawing_rows)
combined = deposit_rows.merge(withdrawing_rows) { |_key, old_val, new_val| old_val + new_val }
combined.map { |address, amount| [address, amount] }
end
end
end
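
combine_rows relies on Hash#merge with a block, which resolves address collisions by summing the per-address deposit and withdrawing amounts; a self-contained example with made-up addresses:

# Standalone Ruby illustration of the merge-with-block used by combine_rows.
deposits    = { "addr_a" => 100.0, "addr_b" => 50.0 }
withdrawing = { "addr_b" => 25.0, "addr_c" => 10.0 }
combined = deposits.merge(withdrawing) { |_key, old_val, new_val| old_val + new_val }
combined.map { |address, amount| [address, amount] }
# => [["addr_a", 100.0], ["addr_b", 75.0], ["addr_c", 10.0]]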
