Merge pull request #2063 from nervosnetwork/develop
Deploy to testnet
zmcNotafraid committed Jul 15, 2024
2 parents 36b14f5 + 68746b0 commit fb5187f
Showing 10 changed files with 1,046 additions and 104 deletions.
15 changes: 6 additions & 9 deletions app/models/cell_output.rb
@@ -1,6 +1,8 @@
class CellOutput < ApplicationRecord
include CellOutputs::ExtraInfo

self.primary_key = :id

SYSTEM_TX_HASH = "0x0000000000000000000000000000000000000000000000000000000000000000".freeze
MAXIMUM_DOWNLOADABLE_SIZE = 64000
MIN_SUDT_AMOUNT_BYTESIZE = 16
@@ -298,11 +300,10 @@ def self.update_cell_types_for_cota
#
# id :bigint not null, primary key
# capacity :decimal(64, 2)
# data :binary
# ckb_transaction_id :bigint
# created_at :datetime not null
# updated_at :datetime not null
# status :integer default("live")
# status :integer default("live"), not null
# address_id :decimal(30, )
# block_id :decimal(30, )
# tx_hash :binary
@@ -322,17 +323,13 @@ def self.update_cell_types_for_cota
#
# Indexes
#
# index_cell_outputs_on_address_id_and_status (address_id,status)
# index_cell_outputs_on_address_id (address_id)
# index_cell_outputs_on_block_id (block_id)
# index_cell_outputs_on_block_timestamp (block_timestamp)
# index_cell_outputs_on_cell_type (cell_type)
# index_cell_outputs_on_ckb_transaction_id_and_cell_index (ckb_transaction_id,cell_index) UNIQUE
# index_cell_outputs_on_consumed_block_timestamp (consumed_block_timestamp)
# index_cell_outputs_on_consumed_by_id (consumed_by_id)
# index_cell_outputs_on_data_hash (data_hash) USING hash
# index_cell_outputs_on_lock_script_id (lock_script_id)
# index_cell_outputs_on_status (status)
# index_cell_outputs_on_tx_hash_and_cell_index (tx_hash,cell_index) UNIQUE
# index_cell_outputs_on_tx_hash_and_cell_index_and_status (tx_hash,cell_index,status) UNIQUE
# index_cell_outputs_on_tx_id_and_cell_index_and_status (ckb_transaction_id,cell_index,status) UNIQUE
# index_cell_outputs_on_type_script_id (type_script_id)
# index_cell_outputs_on_type_script_id_and_id (type_script_id,id)
#
11 changes: 7 additions & 4 deletions app/models/ckb_sync/new_node_data_processor.rb
@@ -846,7 +846,9 @@ def build_cells_and_locks!(
build_cell_outputs!(node_block, outputs, ckb_txs, local_block, cell_outputs_attributes, output_capacities, tags,
udt_address_ids, dao_address_ids, contained_udt_ids, contained_addr_ids, addrs_changes, token_transfer_ckb_tx_ids)
if cell_outputs_attributes.present?
id_hashes = CellOutput.upsert_all(cell_outputs_attributes, unique_by: %i[tx_hash cell_index],
tx_ids = cell_outputs_attributes.map { |attr| attr[:ckb_transaction_id] }
CellOutput.pending.where(ckb_transaction_id: tx_ids).update_all(status: :live)
id_hashes = CellOutput.upsert_all(cell_outputs_attributes, unique_by: %i[tx_hash cell_index status],
returning: %i[id data_hash])
cell_data_attrs = []

@@ -873,8 +875,11 @@ def build_cells_and_locks!(
CellInput.upsert_all(cell_inputs_attributes,
unique_by: %i[ckb_transaction_id index])
if prev_cell_outputs_attributes.present?
cell_output_ids = prev_cell_outputs_attributes.map { |attr| attr[:id] }
CellOutput.live.where(id: cell_output_ids).update_all(status: :dead)
CellOutput.upsert_all(prev_cell_outputs_attributes,
unique_by: %i[tx_hash cell_index])
unique_by: %i[tx_hash cell_index status],
record_timestamps: true)
end

ScriptTransaction.create_from_scripts TypeScript.where(id: type_script_ids)
@@ -1221,11 +1226,9 @@ def cell_input_attributes(input, ckb_transaction_id, local_block_id,
previous_output: {
id: previous_output.id,
cell_type: previous_output.cell_type,
created_at: previous_output.created_at,
tx_hash: input.previous_output.tx_hash,
cell_index: input.previous_output.index,
status: "dead",
updated_at: Time.current,
consumed_by_id: ckb_transaction_id,
consumed_block_timestamp: CkbTransaction.find(ckb_transaction_id).block_timestamp,
},
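Note on the hunks above: the unique index on cell_outputs now includes status, so an upsert keyed on (tx_hash, cell_index, status) only conflicts with a row that already carries the target status. That is presumably why the existing rows are flipped with update_all first (pending to live for fresh outputs, live to dead for consumed ones) before upsert_all runs. A minimal sketch of the consumed-output half of the pattern; the attribute values below are illustrative placeholders, not taken from the diff:

# Illustrative sketch only: flip the previously live outputs to dead first,
# so the upsert's (tx_hash, cell_index, status) key matches the existing rows
# and updates them instead of inserting duplicates.
prev_attrs = [
  { id: 42, tx_hash: "0x" + "00" * 32, cell_index: 0, status: "dead", consumed_by_id: 7 },
]
CellOutput.live.where(id: prev_attrs.map { |a| a[:id] }).update_all(status: :dead)
CellOutput.upsert_all(prev_attrs,
                      unique_by: %i[tx_hash cell_index status],
                      record_timestamps: true)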
1 change: 0 additions & 1 deletion app/models/referring_cell.rb
@@ -56,6 +56,5 @@ def self.create_initial_data_for_ckb_transaction(ckb_transaction)
#
# Indexes
#
# index_referring_cells_on_cell_output_id (cell_output_id) UNIQUE
# index_referring_cells_on_contract_id_and_cell_output_id (contract_id,cell_output_id) UNIQUE
#
12 changes: 6 additions & 6 deletions app/workers/generate_statistics_data_worker.rb
@@ -15,25 +15,25 @@ def perform(block_id)
epoch_stats.update(largest_block_size: block_size, largest_block_number: block.number)
end

cell_outputs = block.cell_outputs.select(:id, :created_at, :data, :capacity, :lock_script_id, :type_script_id).to_a
cell_outputs = block.cell_outputs.includes(:cell_datum)
cell_outputs_attributes = []
cell_outputs.each do |cell_output|
data_size =
if cell_output.data
if cell_output.data != "0x"
CKB::Utils.hex_to_bin(cell_output.data).bytesize
else
0
end

cell_outputs_attributes << {
id: cell_output.id,
tx_hash: cell_output.tx_hash,
cell_index: cell_output.cell_index,
status: cell_output.status,
data_size:,
occupied_capacity: CkbUtils.calculate_cell_min_capacity(cell_output.node_output, cell_output.data),
created_at: cell_output.created_at,
updated_at: Time.current,
}
end

CellOutput.upsert_all(cell_outputs_attributes) if cell_outputs_attributes.present?
CellOutput.upsert_all(cell_outputs_attributes, unique_by: %i[tx_hash cell_index status], record_timestamps: true) if cell_outputs_attributes.present?
end
end
8 changes: 4 additions & 4 deletions config/database.yml
@@ -21,14 +21,14 @@ default: &default
# http://guides.rubyonrails.org/configuring.html#database-pooling
host: <%= ENV.fetch("DB_HOST") { "localhost" } %>
port: <%= ENV.fetch("DB_PORT") { "5432" } %>
pool: <%= ENV.fetch("RAILS_MAX_THREADS") { 25 } %>
pool: <%= ENV.fetch("DB_POOL") { 5 } %>
username: <%= ENV.fetch("DB_USERNAME") { "postgres" } %>
password: <%= ENV.fetch("DB_PASSWORD") { "postgres" } %>
reaping_frequency: 60 # every 60s check pool
timeout: 5000 # over 5s will timeout
variables:
statement_timeout: 120000 # query timeout over 120s
idle_in_transaction_session_timeout: 60000 # idle transaction over 60s
statement_timeout: 60000 # query timeout over 60s
idle_in_transaction_session_timeout: 120000 # idle transaction over 120s

development:
<<: *default
@@ -88,5 +88,5 @@ test:
# url: <%= ENV['DATABASE_URL'] %>
#
production:
<<: *default
url: <%= ENV['DATABASE_URL'] %>
pool: <%= ENV["DB_POOL"] || ENV['RAILS_MAX_THREADS'] || 5 %>
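One detail in the database.yml change above: the default pool now reads DB_POOL (falling back to 5) instead of RAILS_MAX_THREADS, while production resolves DB_POOL, then RAILS_MAX_THREADS, then 5. A small standalone check of that ERB fallback order; the values are arbitrary examples, not the deployment's settings:

require "erb"

# Hypothetical values chosen only to show the fallback order of the production pool line.
template = %q(pool: <%= ENV["DB_POOL"] || ENV['RAILS_MAX_THREADS'] || 5 %>)

ENV.delete("DB_POOL")
ENV["RAILS_MAX_THREADS"] = "25"
puts ERB.new(template).result   # => pool: 25 (falls back to RAILS_MAX_THREADS)

ENV["DB_POOL"] = "10"
puts ERB.new(template).result   # => pool: 10 (DB_POOL wins when set)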
9 changes: 9 additions & 0 deletions db/migrate/…_rename_cell_outputs_to_cell_outputs_old.rb
@@ -0,0 +1,9 @@
class RenameCellOutputsToCellOutputsOld < ActiveRecord::Migration[7.0]
def up
rename_table :cell_outputs, :cell_outputs_old
end

def down
rename_table :cell_outputs_old, :cell_outputs
end
end
47 changes: 47 additions & 0 deletions db/migrate/20240709131132_create_cell_outputs.rb
@@ -0,0 +1,47 @@
class CreateCellOutputs < ActiveRecord::Migration[7.0]
def up
execute <<-SQL
CREATE TABLE cell_outputs (
id bigserial NOT NULL,
capacity numeric(64,2),
ckb_transaction_id bigint,
created_at timestamp without time zone NOT NULL,
updated_at timestamp without time zone NOT NULL,
status smallint DEFAULT 0,
address_id numeric(30,0),
block_id numeric(30,0),
tx_hash bytea,
cell_index integer,
consumed_by_id numeric(30,0),
cell_type integer DEFAULT 0,
data_size integer,
occupied_capacity numeric(30,0),
block_timestamp numeric(30,0),
consumed_block_timestamp numeric(30,0),
type_hash character varying,
udt_amount numeric(40,0),
dao character varying,
lock_script_id bigint,
type_script_id bigint,
data_hash bytea,
primary key (id, status)
) PARTITION BY LIST (status);
CREATE TABLE cell_outputs_live PARTITION OF cell_outputs
FOR VALUES IN (0);
CREATE TABLE cell_outputs_dead PARTITION OF cell_outputs
FOR VALUES IN (1);
CREATE TABLE cell_outputs_pending PARTITION OF cell_outputs
FOR VALUES IN (2);
CREATE TABLE cell_outputs_rejected PARTITION OF cell_outputs
FOR VALUES IN (3);
SQL
end

def down
drop_table :cell_outputs
end
end
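For context on this migration: PostgreSQL requires the partition key of a partitioned table to appear in the primary key and in every unique index, which is presumably why the primary key here is (id, status) and why the unique indexes added in 20240709142013_add_index_to_cell_outputs.rb end with status. The payoff is partition pruning: a query that filters on status only scans the matching child table. A rough illustration, assuming the status enum maps live/dead/pending/rejected to 0..3 exactly as in the FOR VALUES clauses above:

# Illustrative only: the status predicate lets the planner prune the other
# partitions, so this query scans cell_outputs_live alone.
CellOutput.live.where(address_id: 1).limit(20).to_sql
# => roughly: SELECT "cell_outputs".* FROM "cell_outputs"
#    WHERE "cell_outputs"."status" = 0 AND "cell_outputs"."address_id" = 1 LIMIT 20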
18 changes: 18 additions & 0 deletions db/migrate/…_import_cell_outputs_old_to_cell_outputs.rb
@@ -0,0 +1,18 @@
class ImportCellOutputsOldToCellOutputs < ActiveRecord::Migration[7.0]
def up
execute <<~SQL
SET statement_timeout = 0;
INSERT INTO cell_outputs (id, capacity, ckb_transaction_id, status, address_id, block_id, tx_hash, cell_index, consumed_by_id, cell_type, data_size, occupied_capacity, block_timestamp, consumed_block_timestamp, type_hash, udt_amount, dao, lock_script_id, type_script_id, data_hash, created_at, updated_at)
SELECT id, capacity, ckb_transaction_id, status, address_id, block_id, tx_hash, cell_index, consumed_by_id, cell_type, data_size, occupied_capacity, block_timestamp, consumed_block_timestamp, type_hash, udt_amount, dao, lock_script_id, type_script_id, data_hash, created_at, updated_at FROM cell_outputs_old;
SELECT setval('cell_outputs_id_seq', (SELECT max(id) FROM cell_outputs));
SQL
end

def down
execute <<~SQL
TRUNCATE TABLE cell_outputs;
SQL
end
end
13 changes: 13 additions & 0 deletions db/migrate/20240709142013_add_index_to_cell_outputs.rb
@@ -0,0 +1,13 @@
class AddIndexToCellOutputs < ActiveRecord::Migration[7.0]
def change
add_index :cell_outputs, :address_id
add_index :cell_outputs, :block_id
add_index :cell_outputs, :consumed_by_id
add_index :cell_outputs, :lock_script_id
add_index :cell_outputs, :type_script_id
add_index :cell_outputs, %i[ckb_transaction_id cell_index status], unique: true, name: "index_cell_outputs_on_tx_id_and_cell_index_and_status"
add_index :cell_outputs, %i[tx_hash cell_index status], unique: true, name: "index_cell_outputs_on_tx_hash_and_cell_index_and_status"
add_index :cell_outputs, :block_timestamp
add_index :cell_outputs, :consumed_block_timestamp
end
end