[SPT-284] added training_indicator, make some cleanup #1800

Open
wants to merge 39 commits into base: development

Commits (39)
9d4a0ec
[SPT-284] update service model
mirel-dev Apr 27, 2024
763569f
[SPT-284] add functionality for detecting training indicator in proto
mirel-dev Apr 27, 2024
9e9856a
[FIX] cleanup and add type hints
mirel-dev Apr 27, 2024
12e3f03
[SPT-248] add update_service_training_indicator method in service repo
mirel-dev Apr 27, 2024
8f0d6c8
[SPT-284] fix migrations
mirel-dev May 2, 2024
e5d025c
[SPT-284] update test for get_service_metadata method
mirel-dev May 2, 2024
4c41952
[SPT-284] update service metadata validation scheme
mirel-dev May 2, 2024
274918e
[SPT-284] update ServicePublisherDomainService
mirel-dev May 2, 2024
79a2862
[SPT-284] fix TestServicePublisherDomainService
mirel-dev May 2, 2024
796922d
[SPT-284] update service factory
mirel-dev May 3, 2024
cffa4c4
[SPT-284] fix save_service
mirel-dev May 3, 2024
7e7d766
[FIX] update service entity model
mirel-dev May 3, 2024
72eb847
[FIX] update TestServicePublisherDomainService
mirel-dev May 3, 2024
d1479b8
[FIX] update ServiceEventConsumer
mirel-dev May 6, 2024
3826786
[FIX] update testcases for registry
mirel-dev May 6, 2024
ef2fb4e
[FIX] update create_service_entity_model
mirel-dev May 6, 2024
e39add4
[FIX] update convert_service_state_from_db
mirel-dev May 6, 2024
2cce047
[FIX] update service factory and TestService
mirel-dev May 6, 2024
ac09ea8
[FIX] update service factory
mirel-dev May 6, 2024
1a945ef
[FIX] update convert_service_state_entity_model_to_db_model
mirel-dev May 8, 2024
7467f9b
[SPT-284] update proto processing
mirel-dev May 13, 2024
560a42b
[SPT-284] add logger in proto_compilation
mirel-dev May 13, 2024
156dc1c
[SPT-284] update response objects in requests for lambda compilation
mirel-dev May 13, 2024
06a0ee2
[SPT-284] fix logging in lambda invocation
mirel-dev May 13, 2024
b373a7c
[FIX] syntax error
mirel-dev May 13, 2024
de665cf
[SPT-284] update logger level
mirel-dev May 13, 2024
14cb397
[fix] response parsing
mirel-dev May 13, 2024
2f38786
[FIX] response parsing
mirel-dev May 13, 2024
48e94ab
[FIX] add prints for debug
mirel-dev May 13, 2024
d146699
[FIX] add logs
mirel-dev May 14, 2024
7cf6195
[FIX] add logs
mirel-dev May 14, 2024
acb249d
[FIX] add logs
mirel-dev May 15, 2024
8cff1d9
[FIX] update logs
mirel-dev May 15, 2024
8233390
[SPT-284] add training_indicator in contract-api
mirel-dev May 15, 2024
90ebcd7
[FIX] update responses
mirel-dev May 15, 2024
51fd252
[TESTING]
mirel-dev May 15, 2024
f49c978
[TESTING]
mirel-dev May 16, 2024
afacdf9
[SPT-284] update test variables
mirel-dev May 16, 2024
c5d588d
[TESTING]
mirel-dev May 16, 2024
5 changes: 4 additions & 1 deletion .gitignore
@@ -119,4 +119,7 @@ event_pubsub/node_modules/
orchestrator/node_modules/*
orchestrator/package-lock.json
utility/node_modules/*
utility/package-lock.json
utility/package-lock.json

*Pipfile
*Pipfile.lock
54 changes: 28 additions & 26 deletions common/boto_utils.py
@@ -13,35 +13,37 @@ class BotoUtils:
def __init__(self, region_name):
self.region_name = region_name

def get_ssm_parameter(self, parameter, config=Config(retries={'max_attempts': 1})):
""" Format config=Config(connect_timeout=1, read_timeout=0.1, retries={'max_attempts': 1}) """
ssm = boto3.client('ssm', region_name=self.region_name, config=config)
def get_ssm_parameter(self, parameter, config=Config(retries={"max_attempts": 1})):
""" Format config=Config(connect_timeout=1, read_timeout=0.1, retries={"max_attempts": 1}) """
ssm = boto3.client("ssm", region_name=self.region_name, config=config)
parameter = ssm.get_parameter(Name=parameter, WithDecryption=True)
return parameter["Parameter"]["Value"]

def invoke_lambda(self, lambda_function_arn, invocation_type, payload, config=Config(retries={'max_attempts': 1})):
""" Format config=Config(connect_timeout=1, read_timeout=0.1, retries={'max_attempts': 1}) """
lambda_client = boto3.client('lambda', region_name=self.region_name, config=config)
def invoke_lambda(self, lambda_function_arn, invocation_type, payload, config=Config(retries={"max_attempts": 1})):
""" Format config=Config(connect_timeout=1, read_timeout=0.1, retries={"max_attempts": 1}) """
logger.info(f"Invoke lambda :: {lambda_function_arn}")
lambda_client = boto3.client("lambda", region_name=self.region_name, config=config)
lambda_response = lambda_client.invoke(FunctionName=lambda_function_arn, InvocationType=invocation_type,
Payload=payload)
logger.info(f"Get lambda response body :: {lambda_response.get('Payload').read()}")
if invocation_type == "Event":
return lambda_response
return json.loads(lambda_response.get('Payload').read())
return json.loads(lambda_response.get("Payload").read())

def s3_upload_file(self, filename, bucket, key):
s3_client = boto3.client('s3')
s3_client = boto3.client("s3")
s3_client.upload_file(filename, bucket, key)

def s3_download_file(self, bucket, key, filename):
s3_client = boto3.client('s3')
s3_client = boto3.client("s3")
s3_client.download_file(bucket, key, filename)

def get_parameter_value_from_secrets_manager(self, secret_name):
config = Config(retries=dict(max_attempts=2))
session = boto3.session.Session()
client = session.client(service_name='secretsmanager', region_name=self.region_name, config=config)
client = session.client(service_name="secretsmanager", region_name=self.region_name, config=config)
try:
parameter_value = client.get_secret_value(SecretId=secret_name)['SecretString']
parameter_value = client.get_secret_value(SecretId=secret_name)["SecretString"]
except ClientError as e:
logger.error(f"Failed to fetch credentials {e}")
raise e
@@ -52,18 +52,18 @@ def get_parameter_value_from_secrets_manager(self, secret_name):
@staticmethod
def delete_objects_from_s3(bucket, key, key_pattern):
if key_pattern in key:
s3_client = boto3.client('s3')
s3_client = boto3.client("s3")
s3_client.delete_object(Bucket=bucket, Key=key)

@staticmethod
def get_objects_from_s3(bucket, key):
s3 = boto3.client('s3')
s3 = boto3.client("s3")
objects = []
paginator = s3.get_paginator('list_objects_v2')
paginator = s3.get_paginator("list_objects_v2")
pages = paginator.paginate(Bucket=bucket, Prefix=key)
for page in pages:
if page.get("Contents", None):
for obj in page['Contents']:
for obj in page["Contents"]:
objects.append(obj)
return objects

@@ -89,45 +91,45 @@ def download_folder_contents_from_s3(self, bucket, key, target):
try:
keys = self.get_objects_from_s3(bucket=bucket, key=key)
for object_key in keys:
path, filename = os.path.split(object_key['Key'])
file_or_folder = object_key['Key'].replace(key,"")
path, filename = os.path.split(object_key["Key"])
file_or_folder = object_key["Key"].replace(key,"")
sub_folder_structure = ""
if "/" in file_or_folder:
sub_folder_structure = path.replace(key, "")
target_path = os.path.join(target, sub_folder_structure)
if not os.path.exists(target_path):
os.makedirs(target_path)
self.s3_download_file(bucket=bucket, key=object_key['Key'], filename=os.path.join(target_path, filename))
self.s3_download_file(bucket=bucket, key=object_key["Key"], filename=os.path.join(target_path, filename))
except Exception as e:
raise e

@staticmethod
def get_code_build_details(build_ids):
try:
build_client = boto3.client('codebuild')
build_client = boto3.client("codebuild")
return build_client.batch_get_builds(ids=build_ids)
except build_client.exceptions.InvalidInputException as e:
raise Exception(f"build id is not found {build_ids}")

@staticmethod
def trigger_code_build(build_details):
try:
cb = boto3.client('codebuild')
cb = boto3.client("codebuild")
build = cb.start_build(**build_details)
return build
except Exception as e:
raise e

def move_s3_objects(self, source_bucket, source_key, target_bucket, target_key, clear_destination=False):
s3 = boto3.resource('s3')
s3 = boto3.resource("s3")
source_objects = self.get_objects_from_s3(bucket=source_bucket, key=source_key)
dest_bucket = s3.Bucket(target_bucket)
if clear_destination:
destination_key = target_key[:-1] if target_key.endswith('/') else target_key
destination_key = target_key[:-1] if target_key.endswith("/") else target_key
target_objects = self.get_objects_from_s3(bucket=target_bucket, key=destination_key)
for key in target_objects:
self.delete_objects_from_s3(bucket=target_bucket, key=key['Key'], key_pattern=destination_key)
self.delete_objects_from_s3(bucket=target_bucket, key=key["Key"], key_pattern=destination_key)
for source_key in source_objects:
copy_source = {'Bucket': source_bucket, 'Key': source_key['Key']}
dest_bucket.copy(copy_source, target_key + os.path.basename(source_key['Key']))
s3.Object(source_bucket, source_key['Key']).delete()
copy_source = {"Bucket": source_bucket, "Key": source_key["Key"]}
dest_bucket.copy(copy_source, target_key + os.path.basename(source_key["Key"]))
s3.Object(source_bucket, source_key["Key"]).delete()
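
The added logger.info calls make invoke_lambda record both the target ARN and the raw response payload. A minimal usage sketch, assuming a placeholder region and Lambda ARN (neither value comes from this PR):

import json

from common.boto_utils import BotoUtils

# Placeholder region and ARN, for illustration only.
boto_utils = BotoUtils(region_name="us-east-1")
payload = json.dumps({"org_id": "test-org", "service_id": "test-service"})

# Synchronous invocation; the new log lines record the ARN before the call
# and the response payload after it.
response = boto_utils.invoke_lambda(
    lambda_function_arn="arn:aws:lambda:us-east-1:000000000000:function:example",
    invocation_type="RequestResponse",
    payload=payload,
)
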
19 changes: 14 additions & 5 deletions common/utils.py
@@ -13,9 +13,8 @@
import tarfile
import traceback
import zipfile
from pathlib import Path
from http import HTTPStatus
from urllib.parse import urlparse
from zipfile import ZipFile

import requests
import web3
@@ -236,7 +235,7 @@ def ipfsuri_to_bytesuri(uri):
def publish_file_in_ipfs(file_url, file_dir, ipfs_client, wrap_with_directory=True):
filename = download_file_from_url(file_url=file_url, file_dir=file_dir)
file_type = os.path.splitext(filename)[1]
logger.info(f" file type is = '{file_type.lower()}` ")
logger.info(f" file type is = '{file_type.lower()}' ")
if file_type.lower() == ".zip":
return publish_zip_file_in_ipfs(filename, file_dir, ipfs_client)
#todo , you need to tar the folder and add that to the ipfs hash
@@ -263,7 +262,7 @@ def download_file_from_url(file_url, file_dir):


def convert_zip_file_to_tar_bytes(file_dir, filename):
with ZipFile(f"{file_dir}/{filename}", 'r') as zipObj:
with zipfile.ZipFile(f"{file_dir}/{filename}", 'r') as zipObj:
listOfFileNames = zipObj.namelist()
zipObj.extractall(file_dir, listOfFileNames)
if not os.path.isdir(file_dir):
@@ -299,7 +298,7 @@ def send_email_notification(recipients, notification_subject, notification_messa
boto_util.invoke_lambda(lambda_function_arn=notification_arn, invocation_type="RequestResponse",
payload=json.dumps(send_notification_payload))
logger.info(f"email_sent to {recipient}")
except:
except Exception:
logger.error(f"Error happened while sending email to recipient {recipient}")


@@ -366,3 +365,13 @@ def create_text_file(target_path, context):
f = open(target_path, "a")
f.write(context)
f.close()


def daemon_health_check(daemon_endpoint: str) -> dict:
try:
response = requests.get(f"{daemon_endpoint}/heartbeat")
if response.status_code == HTTPStatus.OK:
return response.json()
except Exception:
logger.warning(f"Daemon endpoint {daemon_endpoint} is not available")
return None
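
The new daemon_health_check helper returns the daemon's /heartbeat JSON on HTTP 200 and None when the endpoint does not respond, so callers need to guard against None. A minimal sketch of the intended usage, assuming the heartbeat body carries the trainingEnabled and trainingInProto flags read in contract_api/registry.py below (the endpoint URL is a placeholder):

from common.utils import daemon_health_check

# Placeholder endpoint, not taken from this PR.
endpoint = "https://example-daemon.example.org:8089"

heartbeat = daemon_health_check(endpoint)
if heartbeat is None:
    # The daemon did not answer /heartbeat with HTTP 200.
    training_enabled = False
    training_in_proto = False
else:
    training_enabled = heartbeat.get("trainingEnabled", False)
    training_in_proto = heartbeat.get("trainingInProto", False)
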
18 changes: 12 additions & 6 deletions contract_api/registry.py
@@ -3,11 +3,10 @@

from common.constant import BuildStatus
from common.logger import get_logger
from common.utils import Utils
from common.utils import Utils, daemon_health_check
from contract_api.constant import GET_ALL_SERVICE_LIMIT, GET_ALL_SERVICE_OFFSET_LIMIT
from contract_api.dao.service_repository import ServiceRepository
from contract_api.domain.factory.service_factory import ServiceFactory
from contract_api.domain.models.demo_component import DemoComponent
from contract_api.domain.models.offchain_service_attribute import OffchainServiceAttribute
from contract_api.infrastructure.repositories.service_media_repository import ServiceMediaRepository
from contract_api.infrastructure.repositories.service_repository import ServiceRepository as NewServiceRepository, \
@@ -230,7 +229,6 @@ def get_all_srvcs(self, qry_param):
sub_qry = self._prepare_subquery(s=s, q=q, fm=fields_mapping)
print("get_all_srvcs::sub_qry: ", sub_qry)

filter_qry = ""
if qry_param.get("filters", None) is not None:
filter_query, values = self._filters_to_query(
qry_param.get("filters"))
@@ -409,13 +407,21 @@ def get_service_data_by_org_id_and_service_id(self, org_id, service_id):
"payment": json.loads(rec["payment"])}

is_available = 0

result["training_endpoint"] = None
result["training_proto"] = False

# Hard Coded Free calls in group data
for rec in service_group_data:
rec["free_calls"] = rec.get("free_calls", 0)
if is_available == 0:
endpoints = rec['endpoints']
endpoints = rec["endpoints"]
for endpoint in endpoints:
is_available = endpoint['is_available']
is_available = endpoint["is_available"]
daemon_health_check_response = daemon_health_check(endpoint)
if daemon_health_check_response is not None:
result["training_endpoint"] = endpoint if daemon_health_check_response["trainingInProto"] else None
result["training_enabled"] = daemon_health_check_response["trainingEnabled"]
if is_available == 1:
break
rec.update(org_groups_dict.get(rec['group_id'], {}))
@@ -469,7 +475,7 @@ def update_service_rating(self, org_id, service_id):
and org_id.
"""
try:
update_service_metadata = self.repo.execute(
self.repo.execute(
"UPDATE service_metadata A INNER JOIN "
"(SELECT U.org_id, U.service_id, AVG(U.rating) AS service_rating, count(*) AS total_users_rated "
"FROM user_service_vote AS U WHERE U.rating IS NOT NULL GROUP BY U.service_id, U.org_id ) AS B "
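
With this change, the response of get_service_data_by_org_id_and_service_id carries two new keys: training_endpoint (an endpoint whose daemon heartbeat reports trainingInProto, or None) and training_enabled (taken from the heartbeat's trainingEnabled flag). A hedged consumer-side sketch, assuming an already constructed Registry instance as in the existing test setup and placeholder org/service IDs:

# `registry` is assumed to be a contract_api.registry.Registry instance,
# constructed as in the existing tests; the IDs below are placeholders.
service = registry.get_service_data_by_org_id_and_service_id("test-org", "test-service")

if service.get("training_enabled"):
    # training_endpoint is populated only when a daemon heartbeat reports
    # trainingInProto; otherwise it remains None.
    print("Training endpoint:", service["training_endpoint"])
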
4 changes: 3 additions & 1 deletion contract_api/testcases/test_variables.py
@@ -39,7 +39,9 @@
'demo_component_status': '',
'demo_component_last_modified': ''
},
"demo_component_required": 0
"demo_component_required": 0,
"training_endpoint": None,
"training_enabled": False,
},
"MEDIA": {
'media': [
2 changes: 2 additions & 0 deletions contract_api/testcases/unit_testcases/test_registry.py
@@ -154,6 +154,8 @@ def test_get_service_data_by_org_id_and_service_id_without_media(self):
db.execute(insert_metadata_query)

response = registry.get_service_data_by_org_id_and_service_id(TEST_ORG_ID, TEST_SERVICE_ID)
print(response)
print(GET_SERVICE_RESPONSE["BASE"])
self.assertDictEqual(response, GET_SERVICE_RESPONSE["BASE"])

def tearDown(self):
28 changes: 28 additions & 0 deletions registry/alembic/versions/fc1ad8c1f8b8_add_training_indicator.py
@@ -0,0 +1,28 @@
"""add training_indicator

Revision ID: fc1ad8c1f8b8
Revises: 3312b862c6cb
Create Date: 2024-05-02 11:08:39.254329

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'fc1ad8c1f8b8'
down_revision = '3312b862c6cb'
branch_labels = None
depends_on = None


def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('service', sa.Column('training_indicator', sa.BOOLEAN(), nullable=True))
# ### end Alembic commands ###


def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('service', 'training_indicator')
# ### end Alembic commands ###
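
The migration adds a single nullable BOOLEAN column, so upgrading and downgrading are both cheap. A sketch of applying it programmatically through Alembic's console entry point, equivalent to running "alembic upgrade head" from the directory containing the registry alembic.ini (the working-directory assumption is ours, not part of this PR):

from alembic.config import main as alembic_main

# Assumes the current working directory holds the registry alembic.ini;
# pass argv=["-c", "<path>", ...] if the config file lives elsewhere.
alembic_main(argv=["upgrade", "head"])      # adds service.training_indicator

# Reverting one revision drops the column again:
# alembic_main(argv=["downgrade", "-1"])
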
@@ -7,7 +7,6 @@
from common.boto_utils import BotoUtils
from common.exceptions import MethodNotImplemented
from common.logger import get_logger
from common.utils import Utils
import registry.config
from registry.constants import EnvironmentType, ORG_STATUS_LIST, ORG_TYPE_VERIFICATION_TYPE_MAPPING, \
OrganizationActions, OrganizationIDAvailabilityStatus, OrganizationMemberStatus, OrganizationStatus, \
@@ -20,7 +19,7 @@
from registry.mail_templates import \
get_notification_mail_template_for_service_provider_when_org_is_submitted_for_onboarding, \
get_owner_mail_for_org_rejected, get_owner_mail_for_org_changes_requested, get_owner_mail_for_org_approved
from registry.mail_templates import get_org_member_invite_mail, get_org_approval_mail
from registry.mail_templates import get_org_member_invite_mail

org_repo = OrganizationPublisherRepository()

@@ -127,6 +126,7 @@ def save_transaction_hash_for_publish_org(self, payload):
return "OK"

def get_all_member(self, status, role, pagination_details):
#TODO pagination
offset = pagination_details.get("offset", None)
limit = pagination_details.get("limit", None)
sort = pagination_details.get("sort", None)
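
The new #TODO marks that the offset, limit, and sort values extracted here are not yet applied to the member query. A purely illustrative sketch of how they might be wired in, not code from this PR (the query object is an assumption):

# Hypothetical continuation of get_all_member; `query` is an assumed
# SQLAlchemy query over organization members (sort handling omitted).
if offset is not None:
    query = query.offset(offset)
if limit is not None:
    query = query.limit(limit)
members = query.all()
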