Skip to content
This repository has been archived by the owner on Feb 3, 2022. It is now read-only.

update setting for cache port #3

Open
wants to merge 26 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -103,3 +103,4 @@ data/
# mypy
.mypy_cache/
.idea
.history/
2 changes: 1 addition & 1 deletion .gitmodules
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@
url = https://github.com/polkascan/py-substrate-interface.git
[submodule "py-scale-codec"]
path = py-scale-codec
url = https://github.com/polkascan/py-scale-codec.git
url = https://github.com/ProChain/py-scale-codec.git
6 changes: 6 additions & 0 deletions app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,12 @@
# Falcon route table: each call maps a URI template to a JSON:API resource
# handler from the polkascan resources module. URL template fields in braces
# (e.g. {item_id}) are forwarded to the resource as keyword arguments.
app.add_route('/networkstats/{network_id}', polkascan.NetworkStatisticsResource())
app.add_route('/balances/transfer', polkascan.BalanceTransferListResource())
app.add_route('/balances/transfer/{item_id}', polkascan.BalanceTransferDetailResource())
# Routes added for DID/transfer lookups; presumably {did}/{item_id} carry a
# DID or address string — TODO confirm against the resource implementations.
app.add_route('/transfer/{did}', polkascan.TransferListResource())
app.add_route('/did', polkascan.DidListResource())
app.add_route('/did/{item_id}', polkascan.DidDetailResource())
app.add_route('/did/social_account/{item_id}', polkascan.DidDetailBySocialAccountResource())
app.add_route('/did/members/{did_hash}', polkascan.DidMembersResource())
app.add_route('/did/invite_ranking',polkascan.DidInviteRanking())
app.add_route('/account', polkascan.AccountResource())
app.add_route('/account/{item_id}', polkascan.AccountDetailResource())
app.add_route('/accountindex', polkascan.AccountIndexListResource())
Expand Down
33 changes: 33 additions & 0 deletions app/models/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -873,3 +873,36 @@ class RuntimeType(BaseModel):
spec_version = sa.Column(sa.Integer(), nullable=False)
type_string = sa.Column(sa.String(255))
decoder_class = sa.Column(sa.String(255), nullable=True)

class Transfer(BaseModel):
    """Balance transfer event, keyed by (block_id, event_idx).

    One row per transfer event extracted from a block; ``serialize_id``
    exposes the composite key as a single ``block-event`` string.
    """

    __tablename__ = 'data_transfer'

    # Composite primary key: the block the event occurred in plus the
    # event's index within that block.
    block_id = sa.Column(sa.Integer(), primary_key=True)
    block = relationship(Block, foreign_keys=[block_id], primaryjoin=block_id == Block.id)

    event_idx = sa.Column(sa.Integer(), primary_key=True)

    # Index of the originating extrinsic within the block (nullable).
    extrinsic_idx = sa.Column(sa.Integer())
    # Sender/recipient, both as DID string and as raw account id (hex) —
    # all indexed for lookup by either representation.
    from_did = sa.Column(sa.String(44),index = True)
    from_account_id = sa.Column(sa.String(64),index=True)
    to_did = sa.Column(sa.String(44),index = True)
    to_account_id = sa.Column(sa.String(64),index=True)
    # Amounts stored as exact integers (scale=0) to avoid float rounding;
    # balance allows up to 65 digits, fee up to 18.
    balance = sa.Column(sa.Numeric(precision=65, scale=0), nullable=False)
    fee = sa.Column(sa.Numeric(precision=18, scale=0), nullable=False)
    # Block timestamp; NOTE(review): attribute name shadows the stdlib
    # ``datetime`` module name inside this class body — confirm intentional.
    datetime = sa.Column(sa.DateTime(timezone=True))


    def serialize_id(self):
        """Return the composite identifier as '<block_id>-<event_idx>'."""
        return '{}-{}'.format(self.block_id, self.event_idx)

class Did(BaseModel):
    """Decentralized identifier (DID) record mapping a DID to an address.

    ``address`` is the on-chain account the DID resolves to; ``superior``
    presumably links to the inviting/parent DID hash — TODO confirm against
    the harvester that populates this table.
    """

    __tablename__ = 'data_did'

    # The DID string itself is the primary key.
    did = sa.Column(sa.String(50), primary_key=True)
    # Account address the DID points at; indexed for reverse lookup
    # (see BaseResource.convert_to_did usage elsewhere in this project).
    address = sa.Column(sa.String(48),index=True)
    superior = sa.Column(sa.String(66),index=True)
    did_hash = sa.Column(sa.String(66),index=True)
    creator = sa.Column(sa.String(50))
    social_account_hash = sa.Column(sa.String(66),index =True)
    def serialize_id(self):
        """Use the DID itself as the serialized identifier."""
        return self.did
82 changes: 77 additions & 5 deletions app/resources/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
from sqlalchemy.orm import Session

from app.settings import MAX_RESOURCE_PAGE_SIZE, DOGPILE_CACHE_SETTINGS

from app.models.data import Did

class BaseResource(object):

Expand All @@ -46,6 +46,33 @@ def get_meta(self):
def serialize_item(self, item):
return item.serialize()

#["address","address2"]
def convert_to_did_items(self):
return []

def convert_to_did(self,data):
items = self.convert_to_did_items()
if items and len(items)>0:
if isinstance(data,list):
address = []
for row in data:
for item in items:
address.append(row['attributes'][item])
alldid = Did.query(self.session).filter(Did.address.in_(address)).all()
did_map = dict([(did.address,did.did) for did in alldid])
for row in data:
for item in items:
if row['attributes'][item] in did_map:
row['attributes'][item+'_source'] = row['attributes'][item]
row['attributes'][item] = did_map[row['attributes'][item]]
else:
for item in items:
did = Did.query(self.session).filter_by(address=data['attributes'][item]).first()
if did:
data['attributes'][item+'_source'] = data['attributes'][item]
data['attributes'][item] = did.did
return data

def process_get_response(self, req, resp, **kwargs):
return {
'status': falcon.HTTP_200,
Expand All @@ -54,7 +81,7 @@ def process_get_response(self, req, resp, **kwargs):
}

def get_jsonapi_response(self, data, meta=None, errors=None, links=None, relationships=None, included=None):

data = self.convert_to_did(data)
result = {
'meta': {
"authors": [
Expand All @@ -67,7 +94,6 @@ def get_jsonapi_response(self, data, meta=None, errors=None, links=None, relatio
"data": data,
"links": {}
}

if meta:
result['meta'].update(meta)

Expand Down Expand Up @@ -135,7 +161,8 @@ def process_get_response(self, req, resp, **kwargs):
items = self.get_query()
items = self.apply_filters(items, req.params)
items = self.apply_paging(items, req.params)
# NOTE(review): removed leftover debug print(len(items)) / print(items)
# calls — they executed on every list request and dumped each result page
# to stdout.
return {
'status': falcon.HTTP_200,
'media': self.get_jsonapi_response(
Expand All @@ -145,7 +172,52 @@ def process_get_response(self, req, resp, **kwargs):
'cacheable': True
}

class JSONAPIListResource2(JSONAPIResource, ABC):
    """List resource whose query is parameterized by a URL path segment.

    Unlike a plain list resource, ``get_query`` receives the value of the
    URL template field named by ``get_item_url_name`` (default ``item_id``),
    so subclasses can build per-item list queries (e.g. all members of a
    given DID hash).
    """

    cache_expiration_time = DOGPILE_CACHE_SETTINGS['default_list_cache_expiration_time']

    def get_item_url_name(self):
        """Name of the URL template field whose value is passed to ``get_query``."""
        return 'item_id'

    @abstractmethod
    def get_query(self, item_id):
        """Return the filterable, sliceable query for ``item_id``. Must be overridden."""
        raise NotImplementedError()

    def apply_paging(self, query, params):
        """Slice ``query`` per JSON:API ``page[number]``/``page[size]`` params.

        ``page[number]`` is 1-based; ``page[size]`` is capped at
        ``MAX_RESOURCE_PAGE_SIZE``.
        """
        page = int(params.get('page[number]', 1)) - 1
        page_size = min(int(params.get('page[size]', 25)), MAX_RESOURCE_PAGE_SIZE)
        return query[page * page_size: page * page_size + page_size]

    def has_total(self):
        """Whether to compute ``count()`` and expose it as ``meta['total']``."""
        return False

    def process_get_response(self, req, resp, **kwargs):
        """Build the JSON:API list response for a GET request.

        Refactored: the original duplicated the whole response dict in two
        branches driven by a ``total = -1`` sentinel; both branches built the
        same meta (``get_meta()`` plus an optional ``total``), so a single
        path suffices and behavior is unchanged.
        """
        items = self.get_query(kwargs.get(self.get_item_url_name()))
        items = self.apply_filters(items, req.params)
        meta = self.get_meta()
        if self.has_total():
            # count() before paging so the total reflects the full result set
            meta['total'] = items.count()
        items = self.apply_paging(items, req.params)
        return {
            'status': falcon.HTTP_200,
            'media': self.get_jsonapi_response(
                data=[self.serialize_item(item) for item in items],
                meta=meta
            ),
            'cacheable': True
        }

class JSONAPIDetailResource(JSONAPIResource, ABC):

cache_expiration_time = DOGPILE_CACHE_SETTINGS['default_detail_cache_expiration_time']
Expand Down Expand Up @@ -175,7 +247,7 @@ def process_get_response(self, req, resp, **kwargs):
response = {
'status': falcon.HTTP_200,
'media': self.get_jsonapi_response(
data=self.serialize_item(item),
data= self.serialize_item(item),
relationships=self.get_relationships(req.params.get('include') or [], item),
meta=self.get_meta()
),
Expand Down
Loading