Skip to content

Commit

Permalink
Merge pull request #1815 from singnet/development
Browse files Browse the repository at this point in the history
Update master branch
  • Loading branch information
AlbinaPomogalova authored May 24, 2024
2 parents 803024b + 978e177 commit 6a13a29
Show file tree
Hide file tree
Showing 31 changed files with 210 additions and 88 deletions.
12 changes: 7 additions & 5 deletions common/ipfs_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,23 +2,22 @@
import json
import logging

import ipfsapi
import ipfshttpclient


class IPFSUtil(object):

def __init__(self, ipfs_url, port):
self.ipfs_conn = ipfsapi.connect(host=ipfs_url, port=port)
self.ipfs_conn = ipfshttpclient.connect(f"/dns4/{ipfs_url}/tcp/{port}/http")

def read_bytesio_from_ipfs(self, ipfs_hash):
    """Download the raw content stored at *ipfs_hash* and return it wrapped
    in an in-memory ``io.BytesIO`` stream ready for reading."""
    raw_bytes = self.ipfs_conn.cat(ipfs_hash)
    return io.BytesIO(raw_bytes)

def write_file_in_ipfs(self, filepath, wrap_with_directory=True):
"""
push a file to ipfs given its path
Push a file to IPFS given its path.
"""
try:
with open(filepath, 'r+b') as file:
Expand All @@ -32,6 +31,9 @@ def write_file_in_ipfs(self, filepath, wrap_with_directory=True):
return ''

def read_file_from_ipfs(self, ipfs_hash):
    """Fetch the JSON document stored at *ipfs_hash* and deserialize it.

    1. Retrieve the raw bytes from IPFS via the connected client.
    2. Decode as UTF-8 and parse into a Python dict.
    """
    raw_bytes = self.ipfs_conn.cat(ipfs_hash)
    return json.loads(raw_bytes.decode('utf8'))
10 changes: 5 additions & 5 deletions common/test_blockchain_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

class TestUtils(unittest.TestCase):
def setUp(self):
self.net_id = 3
self.net_id = 11155111
self.http_provider = Web3.HTTPProvider(NETWORKS[self.net_id]['http_provider'])
self.obj_utils = BlockChainUtil(provider_type="HTTP_PROVIDER", provider=self.http_provider)
self.mpe_address = self.obj_utils.read_contract_address(net_id=self.net_id, path=MPE_ADDR_PATH, key='address')
Expand All @@ -35,14 +35,14 @@ def generate_signature(self, message_nonce, signer_key):

def test_create_account(self):
    """Create a new blockchain account and verify the generated address is valid.

    Returns the (address, private_key) pair so other tests can reuse a
    freshly created account.
    """
    address, private_key = self.obj_utils.create_account()
    # Single assertion using identity comparison; the duplicated
    # `== True` variant (a leftover old diff line) is removed.
    assert (Web3.isAddress(address) is True)
    return address, private_key

def test_create_transaction_object1(self):
method_name = "openChannelByThirdParty"
sender, sender_private_key = self.test_create_account()
message_nonce = self.obj_utils.get_current_block_no()
r, s, v, signature = self.generate_signature(message_nonce=message_nonce, signer_key=sender_private_key)
r, s, v, _ = self.generate_signature(message_nonce=message_nonce, signer_key=sender_private_key)
positional_inputs = (sender, SIGNER_ADDRESS, self.recipient, self.group_id, self.agi_tokens, self.expiration, message_nonce, v, r, s)
transaction_object = self.obj_utils.create_transaction_object(*positional_inputs, method_name=method_name,
address=EXECUTOR_ADDRESS,
Expand Down Expand Up @@ -76,7 +76,7 @@ def test_generate_signature_for_state_service(self):
self.current_block_no = 6487832
values = ["__get_channel_state", self.mpe_address, channel_id, self.current_block_no]
signature = self.obj_utils.generate_signature(data_types=data_types, values=values, signer_key=SIGNER_KEY)
v, r, s = Web3.toInt(hexstr="0x" + signature[-2:]), signature[:66], "0x" + signature[66:130]
_, _, _ = Web3.toInt(hexstr="0x" + signature[-2:]), signature[:66], "0x" + signature[66:130]
assert(signature == "0x0b9bb258a0f975328fd9cd9608bd9b570e7b68cad8d337c940e32b9413e348437dd3614f9c6f776b1eb62d521a5794204a010f581721f167c5b26de0928b139d1c")

def test_generate_signature_for_daemon_call(self):
Expand All @@ -86,5 +86,5 @@ def test_generate_signature_for_daemon_call(self):
data_types = ["string", "address", "uint256", "uint256", "uint256"]
values = ["__MPE_claim_message", self.mpe_address, channel_id, nonce, amount]
signature = self.obj_utils.generate_signature(data_types=data_types, values=values, signer_key=SIGNER_KEY)
v, r, s = Web3.toInt(hexstr="0x" + signature[-2:]), signature[:66], "0x" + signature[66:130]
_, _, _ = Web3.toInt(hexstr="0x" + signature[-2:]), signature[:66], "0x" + signature[66:130]
assert(signature == "0x7e50ac20909da29f72ed2ab9cf6c6375f853d8eddfcf3ce33806a4e27b30bcbd5366c41a59647467f0519b0bfc89a50d890b683cd797d5566ba03937f82819c41b")
8 changes: 4 additions & 4 deletions contract_api/config.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
NETWORKS = {
3: {
11155111: {
"name": "test",
"http_provider": "https://ropsten.infura.io",
"ws_provider": "wss://ropsten.infura.io/ws",
"http_provider": "https://sepolia.infura.io",
"ws_provider": "wss://sepolia.infura.io/ws",
"db": {
"DB_DRIVER": "mysql+pymysql",
"DB_HOST": "localhost",
Expand All @@ -23,7 +23,7 @@
'port': '80',

}
NETWORK_ID = 3
NETWORK_ID = 11155111
REGION_NAME = "us-east-2"
S3_BUCKET_ACCESS_KEY = ""
S3_BUCKET_SECRET_KEY = ""
Expand Down
2 changes: 1 addition & 1 deletion contract_api/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@
"serverless-plugin-tracing": "2.0.0",
"serverless-prune-plugin": "1.4.1",
"serverless-python-requirements": "5.0.0",
"singularitynet-platform-contracts": "1.0.1",
"singularitynet-platform-contracts": "1.0.4",
"serverless-aws-documentation": "1.1.0"
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -95,7 +95,7 @@ def test_organziation_create_update_event(self, mock_get_org_details_from_blockc
organization_repository.delete_organization(org_id='snet')
organization_repository.delete_organization_groups(org_id='snet')
org_event_consumer = OrganizationCreatedEventConsumer("wss://ropsten.infura.io/ws",
"http://ipfs.singularitynet.io",
"ipfs.singularitynet.io",
80)
org_event_consumer.on_event(event=event)

Expand Down
4 changes: 2 additions & 2 deletions event_pubsub/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,8 @@
"serverless-plugin-tracing": "2.0.0",
"serverless-prune-plugin": "1.4.1",
"serverless-python-requirements": "5.0.0",
"singularitynet-platform-contracts": "1.0.1",
"singularitynet-stake-contracts": "1.0.0",
"singularitynet-platform-contracts": "1.0.4",
"singularitynet-stake-contracts": "1.0.4",
"singularitynet-rfai-contracts": "1.0.0",
"serverless-latest-layer-version": "2.1.1"
}
Expand Down
28 changes: 28 additions & 0 deletions registry/alembic/versions/3312b862c6cb_add_service_type.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""add service_type
Revision ID: 3312b862c6cb
Revises: 57deaf9ab42f
Create Date: 2024-03-26 10:22:48.873379
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# revision identifiers, used by Alembic.
revision = '3312b862c6cb'
down_revision = '57deaf9ab42f'
branch_labels = None
depends_on = None


def upgrade():
    """Apply this migration: add a nullable `service_type` VARCHAR(128) column
    to the `service` table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('service', sa.Column('service_type', mysql.VARCHAR(length=128), nullable=True))
    # ### end Alembic commands ###


def downgrade():
    """Revert this migration: drop the `service_type` column from the
    `service` table. Any data stored in the column is lost."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('service', 'service_type')
    # ### end Alembic commands ###
4 changes: 2 additions & 2 deletions registry/application/access_control/authorization.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from functools import reduce

from common.logger import get_logger
from common.utils import generate_lambda_response
from registry.exceptions import ForbiddenException
from registry.constants import Action, Role
from registry.infrastructure.repositories.organization_repository import OrganizationPublisherRepository

Expand Down Expand Up @@ -40,7 +40,7 @@ def wrapper(*args, **kwargs):
username = reduce(dict.get, username_path, event)
if is_access_allowed(username, action, org_uuid):
return func(*args, **kwargs)
raise generate_lambda_response(403, "Access Denied for the given User")
raise ForbiddenException()

return wrapper

Expand Down
5 changes: 4 additions & 1 deletion registry/application/handlers/organization_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ def create_organization(event, context):
@secured(action=Action.UPDATE, org_uuid_path=("pathParameters", "org_uuid"),
username_path=("requestContext", "authorizer", "claims", "email"))
def update_org(event, context):
logger.debug(f"Update org, event : {event}")
payload = json.loads(event["body"])
path_parameters = event["pathParameters"]
required_keys = []
Expand All @@ -87,6 +88,7 @@ def update_org(event, context):
@secured(action=Action.CREATE, org_uuid_path=("pathParameters", "org_uuid"),
username_path=("requestContext", "authorizer", "claims", "email"))
def publish_org_on_ipfs(event, context):
logger.debug(f"Publish org on ipfs, event : {event}")
path_parameters = event["pathParameters"]
username = event["requestContext"]["authorizer"]["claims"]["email"]
if "org_uuid" not in path_parameters:
Expand Down Expand Up @@ -121,6 +123,7 @@ def save_transaction_hash_for_publish_org(event, context):
@secured(action=Action.CREATE, org_uuid_path=("pathParameters", "org_uuid"),
username_path=("requestContext", "authorizer", "claims", "email"))
def get_all_members(event, context):
logger.debug(f"Get all members, event : {event}")
username = event["requestContext"]["authorizer"]["claims"]["email"]
path_parameters = event["pathParameters"]
query_parameters = event["queryStringParameters"]
Expand All @@ -140,6 +143,7 @@ def get_all_members(event, context):
@secured(action=Action.CREATE, org_uuid_path=("pathParameters", "org_uuid"),
username_path=("requestContext", "authorizer", "claims", "email"))
def get_member(event, context):
logger.debug(f"Get member, event : {event}")
username = event["requestContext"]["authorizer"]["claims"]["email"]
path_parameters = event["pathParameters"]
org_uuid = path_parameters["org_uuid"]
Expand Down Expand Up @@ -190,7 +194,6 @@ def publish_members(event, context):
username = event["requestContext"]["authorizer"]["claims"]["email"]
payload = json.loads(event["body"])
path_parameters = event["pathParameters"]

if "org_uuid" not in path_parameters or not validate_dict(payload, ["transaction_hash", "members"]):
raise BadRequestException()
org_uuid = path_parameters["org_uuid"]
Expand Down
3 changes: 1 addition & 2 deletions registry/application/handlers/service_handlers.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def save_service(event, context):
username = event["requestContext"]["authorizer"]["claims"]["email"]
path_parameters = event["pathParameters"]
payload = json.loads(event["body"])
if not path_parameters.get("org_uuid", "") and not path_parameters.get("service_uuid", ""):
if not path_parameters.get("org_uuid") and not path_parameters.get("service_uuid"):
raise BadRequestException()
org_uuid = path_parameters["org_uuid"]
service_uuid = path_parameters["service_uuid"]
Expand Down Expand Up @@ -174,7 +174,6 @@ def get_daemon_config_for_current_network(event, context):
raise BadRequestException()
org_uuid = path_parameters["org_uuid"]
service_uuid = path_parameters["service_uuid"]
group_id = path_parameters["group_id"]
if query_parameters["network"] == EnvironmentType.TEST.value:
response = ServicePublisherService(username, org_uuid, service_uuid).daemon_config(
environment=EnvironmentType.TEST.value)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ def get_groups_for_org(self):
}

def create_organization(self, payload):
logger.info(f"create organization for user: {self.username}")
logger.info(f"create organization for user: {self.username}, payload: {payload}")
organization = OrganizationFactory.org_domain_entity_from_payload(payload)
organization.setup_id()
logger.info(f"assigned org_uuid : {organization.uuid}")
Expand Down
30 changes: 20 additions & 10 deletions registry/application/services/service_publisher_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,9 @@
from common.logger import get_logger
from common.utils import download_file_from_url, json_to_file, publish_zip_file_in_ipfs, send_email_notification
from registry.config import ASSET_DIR, IPFS_URL, METADATA_FILE_PATH, NETWORKS, NETWORK_ID, NOTIFICATION_ARN, \
REGION_NAME, PUBLISH_OFFCHAIN_ATTRIBUTES_ENDPOINT, GET_SERVICE_FOR_GIVEN_ORG_ENDPOINT, ASSETS_COMPONENT_BUCKET_NAME
REGION_NAME, PUBLISH_OFFCHAIN_ATTRIBUTES_ENDPOINT, GET_SERVICE_FOR_GIVEN_ORG_ENDPOINT
from registry.constants import EnvironmentType, ServiceAvailabilityStatus, ServiceStatus, \
ServiceSupportType, UserType
ServiceSupportType, UserType, ServiceType
from registry.domain.factory.service_factory import ServiceFactory
from registry.domain.models.demo_component import DemoComponent
from registry.domain.models.offchain_service_config import OffchainServiceConfig
Expand Down Expand Up @@ -85,7 +85,7 @@ def get_service_id_availability_status(self, service_id):

@staticmethod
def get_service_for_org_id_and_service_id(org_id, service_id):
org_uuid, service = ServicePublisherRepository().get_service_for_given_service_id_and_org_id(org_id, service_id)
_, service = ServicePublisherRepository().get_service_for_given_service_id_and_org_id(org_id, service_id)
if not service:
return {}
return service.to_dict()
Expand Down Expand Up @@ -124,12 +124,15 @@ def save_offline_service_configs(self, payload):
return

def save_service(self, payload):
logger.info(f"Save service with payload :: {payload}")
service = ServicePublisherRepository().get_service_for_given_service_uuid(self._org_uuid, self._service_uuid)
service.service_id = payload["service_id"]
service.proto = payload.get("proto", {})
service.display_name = payload.get("display_name", "")
service.short_description = payload.get("short_description", "")
service.description = payload.get("description", "")
service.project_url = payload.get("project_url", "")
service.service_type = payload.get("service_type", ServiceType.GRPC.value)
service.contributors = ServicePublisherService. \
_get_valid_service_contributors(contributors=payload.get("contributors", []))
service.tags = payload.get("tags", [])
Expand All @@ -140,6 +143,7 @@ def save_service(self, payload):
groups.append(service_group)
service.groups = groups
service.service_state.transaction_hash = payload.get("transaction_hash", None)
logger.info(f"Save service data with proto: {service.proto} and assets: {service.assets}")
ServicePublisherRepository().save_service(self._username, service, ServiceStatus.APPROVED.value)
comment = payload.get("comments", {}).get(UserType.SERVICE_PROVIDER.value, "")
if len(comment) > 0:
Expand Down Expand Up @@ -181,6 +185,7 @@ def create_service(self, payload):
service_uuid = uuid4().hex
service = ServiceFactory().create_service_entity_model(self._org_uuid, service_uuid, payload,
ServiceStatus.DRAFT.value)
logger.info(f"Creating service :: {service.to_dict()}")
ServicePublisherRepository().add_service(service, self._username)
return {"org_uuid": self._org_uuid, "service_uuid": service_uuid}

Expand Down Expand Up @@ -239,7 +244,8 @@ def get_service_for_given_service_uuid(self):
return service_data

def publish_service_data_to_ipfs(self):
service = ServicePublisherRepository().get_service_for_given_service_uuid(self._org_uuid, self._service_uuid)
service_publisher_repo = ServicePublisherRepository()
service = service_publisher_repo.get_service_for_given_service_uuid(self._org_uuid, self._service_uuid)
if service.service_state.state == ServiceStatus.APPROVED.value:
proto_url = service.assets.get("proto_files", {}).get("url", None)
if proto_url is None:
Expand All @@ -253,11 +259,11 @@ def publish_service_data_to_ipfs(self):
service.proto = {
"model_ipfs_hash": asset_ipfs_hash,
"encoding": "proto",
"service_type": "grpc"
"service_type": service.service_type
}
service.assets["proto_files"]["ipfs_hash"] = asset_ipfs_hash
ServicePublisherDomainService.publish_assets(service)
service = ServicePublisherRepository().save_service(self._username, service, service.service_state.state)
service = service_publisher_repo.save_service(self._username, service, service.service_state.state)
return service
logger.info(f"Service status needs to be {ServiceStatus.APPROVED.value} to be eligible for publishing.")
raise InvalidServiceStateException()
Expand Down Expand Up @@ -357,11 +363,13 @@ def get_offchain_changes(self, current_offchain_config, existing_offchain_config
demo_component_status=current_service.assets.get("demo_files", {}).get("status", "")
)
demo_changes = new_demo.to_dict()
demo_last_modifed = existing_demo.get("demo_component_last_modified", "")
demo_last_modified = existing_demo.get("demo_component_last_modified", "")
# Publish (flag a change) when the stored last_modified timestamp is absent,
# or when it is newer than the current demo component's last_modified.
demo_changes.update({"change_in_demo_component": 1})
if demo_last_modifed and dt.fromisoformat(
demo_last_modifed) > dt.fromisoformat(current_service.assets.get("demo_files", {}).get("last_modified", "")):
current_demo_last_modified = current_service.assets.get("demo_files", {}).get("last_modified")
if demo_last_modified and \
(current_demo_last_modified is None or \
dt.fromisoformat(demo_last_modified) > dt.fromisoformat(current_demo_last_modified)):
demo_changes.update({"change_in_demo_component": 0})
changes.update({"demo_component": demo_changes})
return changes
Expand All @@ -370,7 +378,7 @@ def publish_offchain_service_configs(self, org_id, service_id, payload):
response = requests.post(
PUBLISH_OFFCHAIN_ATTRIBUTES_ENDPOINT.format(org_id, service_id), data=payload)
if response.status_code != 200:
raise Exception(f"Error in updating offchain service attributes")
raise Exception("Error in updating offchain service attributes")

def get_existing_service_details_from_contract_api(self, service_id, org_id):
response = requests.get(
Expand All @@ -397,8 +405,10 @@ def publish_service_data(self):
existing_metadata = {}
publish_to_blockchain = self.are_blockchain_attributes_got_updated(existing_metadata, current_service.to_metadata())
existing_offchain_configs = self.get_existing_offchain_configs(existing_service_data)

current_offchain_attributes = ServicePublisherRepository().get_offchain_service_config(org_uuid=self._org_uuid,
service_uuid=self._service_uuid)

new_offchain_attributes = self.get_offchain_changes(
current_offchain_config=current_offchain_attributes.configs,
existing_offchain_config=existing_offchain_configs,
Expand Down
Loading

0 comments on commit 6a13a29

Please sign in to comment.