diff --git a/README.md b/README.md index 33db63d..edfbd2c 100644 --- a/README.md +++ b/README.md @@ -3,17 +3,20 @@ This prototype explores how Large Language Models (LLMs) can enhance education by offering a personalized and adaptive learning experience. The LLM complements an instructor's role by providing tailored feedback, identifying knowledge gaps, and recommending targeted resources to students. This approach resonates with the core principles of personalized education, transforming the learning experience into a journey of self-discovery and growth. -| Index | Description | -| :-------------------------------------------------- | :------------------------------------------------------ | -| [High Level Architecture](#high-level-architecture) | High level overview illustrating component interactions | -| [Deployment](#deployment-guide) | How to deploy the project | -| [User Guide](#user-guide) | The working solution | -| [Directories](#directories) | General project directory structure | -| [RAG Documentation](#rag-documentation) | Documentation on how the project uses RAG | -| [API Documentation](#api-documentation) | Documentation on the API the project uses | -| [Changelog](#changelog) | Any changes post publish | -| [Credits](#credits) | Meet the team behind the solution | -| [License](#license) | License details | +| Index | Description | +| :------------------------------------------------------ | :------------------------------------------------------ | +| [High Level Architecture](#high-level-architecture) | High level overview illustrating component interactions | +| [Deployment](#deployment-guide) | How to deploy the project | +| [User Guide](#user-guide) | The working solution | +| [Security & Network Guide](#security-guide-and-network-configuration) | Analysis on network architecture and security | +| [Directories](#directories) | General project directory structure | +| [RAG Documentation](#rag-documentation) | Documentation on how the project 
uses RAG | +| [API Documentation](#api-documentation) | Documentation on the API the project uses | +| [Changelog](#changelog) | Any changes post publish | +| [Credits](#credits) | Meet the team behind the solution | +| [License](#license) | License details | + +--- ## High-Level Architecture @@ -29,6 +32,12 @@ To deploy this solution, please follow the steps laid out in the [Deployment Gui Please refer to the [Web App User Guide](./docs/userGuide.md) for instructions on navigating the web app interface. + +## Security Guide and Network Configuration + +Please refer to the [Security Guide](./docs/securityGuide.md) for an analysis on the network architecture and security of the project + + ## Directories ``` diff --git a/cdk/OpenAPI_Swagger_Definition.yaml b/cdk/OpenAPI_Swagger_Definition.yaml index 52c7610..3b0518b 100644 --- a/cdk/OpenAPI_Swagger_Definition.yaml +++ b/cdk/OpenAPI_Swagger_Definition.yaml @@ -1253,8 +1253,8 @@ paths: passthroughBehavior: "when_no_match" httpMethod: "POST" type: "aws_proxy" - - /instructor/course_messages: + + /instructor/events_notify: options: summary: CORS support description: | @@ -1279,34 +1279,103 @@ paths: responseTemplates: application/json: | {} - get: - summary: Retrieve course messages and competency status for a specific student + post: + tags: + - Events + summary: Notify clients of an event + operationId: events_notify_POST + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + message: + type: string + description: The message to send + sessionId: + type: string + description: The session ID + responses: + "200": + description: Notification sent + "400": + description: Invalid value + "401": + description: Unauthorized + "429": + description: Too Many Requests + "500": + description: Internal Server Error + security: + - instructorAuthorizer: [] + x-amazon-apigateway-integration: + uri: + Fn::Sub: 
"arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${NotificationFunction.Arn}/invocations" + passthroughBehavior: "when_no_match" + httpMethod: "POST" + type: "aws_proxy" + + + /instructor/course_messages: + options: + summary: CORS support description: | - This endpoint returns all messages for a specific student in a course. It includes user_id, module_name, session, message content, competency status (complete/incomplete), and message timestamp, ordered by user_id, module_name, session, and timestamp. - parameters: - - name: instructor_email - in: query - required: true - schema: - type: string - description: The email of the instructor. - - name: course_id - in: query - required: true - schema: - type: string - description: The course ID. + Enable CORS by returning correct headers + responses: + 200: + $ref: "#/components/responses/Success" + x-amazon-apigateway-integration: + type: mock + requestTemplates: + application/json: | + { + "statusCode" : 200 + } + responses: + default: + statusCode: "200" + responseParameters: + method.response.header.Access-Control-Allow-Headers: "'Content-Type,X-Amz-Date,Authorization,X-Api-Key'" + method.response.header.Access-Control-Allow-Methods: "'*'" + method.response.header.Access-Control-Allow-Origin: "'*'" + responseTemplates: + application/json: | + {} + post: + tags: + - Instructor + summary: Submit course message retrieval job + operationId: instructor_course_messages_POST + description: Submit a job to retrieve course messages via SQS. + requestBody: + required: true + content: + application/json: + schema: + type: object + properties: + instructor_email: + type: string + description: The instructor's email. + course_id: + type: string + description: The course ID. + required: + - instructor_email + - course_id responses: "200": - description: Presigned URL generated successfully + description: Job submitted successfully. 
content: application/json: schema: type: object properties: - presignedurl: + message: type: string - description: Presigned URL to upload document file to S3 Bucket + description: A success message. "400": description: Bad Request "401": @@ -1319,7 +1388,7 @@ paths: - instructorAuthorizer: [] x-amazon-apigateway-integration: uri: - Fn::Sub: "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${instructorFunction.Arn}/invocations" + Fn::Sub: "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${sqsFunction.Arn}/invocations" passthroughBehavior: "when_no_match" httpMethod: "POST" type: "aws_proxy" @@ -1554,6 +1623,79 @@ paths: passthroughBehavior: "when_no_match" httpMethod: "POST" type: "aws_proxy" + /instructor/fetch_chatlogs: + options: + summary: CORS support + description: Enable CORS by returning correct headers + responses: + 200: + $ref: "#/components/responses/Success" + x-amazon-apigateway-integration: + type: mock + requestTemplates: + application/json: | + { + "statusCode" : 200 + } + responses: + default: + statusCode: "200" + responseParameters: + method.response.header.Access-Control-Allow-Headers: "'Content-Type,X-Amz-Date,Authorization,X-Api-Key'" + method.response.header.Access-Control-Allow-Methods: "'*'" + method.response.header.Access-Control-Allow-Origin: "'*'" + responseTemplates: + application/json: | + {} + + get: + tags: + - Instructor + summary: Get a chatlog files for a specific course requested by instructor + operationId: instructor_fetch_chatlogs_GET + parameters: + - in: query + name: course_id + required: true + description: ID of the course + schema: + type: string + - in: query + name: instructor_email + required: true + description: Email of the instructor + schema: + type: string + responses: + "200": + description: Recieved all the chatlog files for the course requested by the instructor successfully + content: + application/json: + schema: + type: object + properties: + log_files: + type: object 
+ description: A dictionary where keys are file names and values are presigned URLs + additionalProperties: + type: string + description: Presigned URL for the log file + "400": + description: Bad Request + "401": + description: Unauthorized + "429": + description: Too Many Requests + "500": + description: Internal Server Error + security: + - instructorAuthorizer: [] + x-amazon-apigateway-integration: + uri: + Fn::Sub: "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${GetChatLogsFunction.Arn}/invocations" + passthroughBehavior: "when_no_match" + httpMethod: "POST" + type: "aws_proxy" /instructor/courses: options: summary: CORS support @@ -1610,6 +1752,68 @@ paths: passthroughBehavior: "when_no_match" httpMethod: "POST" type: "aws_proxy" + /instructor/remove_completed_notification: + options: + summary: CORS support + description: | + Enable CORS by returning correct headers + responses: + 200: + $ref: "#/components/responses/Success" + x-amazon-apigateway-integration: + type: mock + requestTemplates: + application/json: | + { + "statusCode" : 200 + } + responses: + default: + statusCode: "200" + responseParameters: + method.response.header.Access-Control-Allow-Headers: "'Content-Type,X-Amz-Date,Authorization,X-Api-Key'" + method.response.header.Access-Control-Allow-Methods: "'*'" + method.response.header.Access-Control-Allow-Origin: "'*'" + responseTemplates: + application/json: | + {} + delete: + tags: + - Instructor + summary: Delete row from database since CSV file full of chatlogs has been completed and notification has been sent to front-end + operationId: instructor_delete_file_DELETE + parameters: + - in: query + name: course_id + required: true + description: ID of the course + schema: + type: string + - in: query + name: instructor_email + required: true + description: Email of the instructor + schema: + type: string + responses: + "200": + description: Notification deleted successfully + "400": + description: Bad Request + "401": + 
description: Unauthorized + "429": + description: Too Many Requests + "500": + description: Internal Server Error + security: + - instructorAuthorizer: [] + x-amazon-apigateway-integration: + uri: + Fn::Sub: "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${instructorFunction.Arn}/invocations" + passthroughBehavior: "when_no_match" + httpMethod: "POST" + type: "aws_proxy" /instructor/delete_file: options: summary: CORS support @@ -2899,6 +3103,87 @@ paths: passthroughBehavior: "when_no_match" httpMethod: "POST" type: "aws_proxy" + /instructor/check_notifications_status: + options: + summary: CORS support + description: | + Enable CORS by returning correct headers + responses: + 200: + $ref: "#/components/responses/Success" + x-amazon-apigateway-integration: + type: mock + requestTemplates: + application/json: | + { + "statusCode" : 200 + } + responses: + default: + statusCode: "200" + responseParameters: + method.response.header.Access-Control-Allow-Headers: "'Content-Type,X-Amz-Date,Authorization,X-Api-Key'" + method.response.header.Access-Control-Allow-Methods: "'*'" + method.response.header.Access-Control-Allow-Origin: "'*'" + responseTemplates: + application/json: | + {} + get: + tags: + - Instructor + summary: Check the completion status for getting the chatlogs for a course requested by an instructor + operationId: instructor_check_notifications_status_GET + parameters: + - in: query + name: course_id + required: true + description: The ID of the course + schema: + type: string + - in: query + name: instructor_email + required: true + description: The email of the instructor + schema: + type: string + responses: + "200": + description: Prompt retrieved successfully + content: + application/json: + schema: + type: object + properties: + isEnabled: + type: Boolean + description: Indicates whether the button to generate chatlogs should be enabled or disabled + completionStatus: + type: boolean + nullable: true + description: The actual value 
of the completion status. If the row does not exist, this will be null. + requestId: + type: string + format: uuid + nullable: true + description: The request_id used to subscribe to AppSync. If the row does not exist, this will be null. + "400": + description: Bad Request + "404": + description: Course not found + "401": + description: Unauthorized + "429": + description: Too Many Requests + "500": + description: Internal Server Error + security: + - instructorAuthorizer: [] + x-amazon-apigateway-integration: + uri: + Fn::Sub: "arn:aws:apigateway:${AWS::Region}:lambda:path/2015-03-31/functions/${instructorFunction.Arn}/invocations" + passthroughBehavior: "when_no_match" + httpMethod: "POST" + type: "aws_proxy" /instructor/get_prompt: options: summary: CORS support diff --git a/cdk/bin/cdk.ts b/cdk/bin/cdk.ts index 9233494..9440459 100644 --- a/cdk/bin/cdk.ts +++ b/cdk/bin/cdk.ts @@ -14,7 +14,6 @@ const env = { region: process.env.CDK_DEFAULT_REGION }; const StackPrefix = app.node.tryGetContext("StackPrefix") - const vpcStack = new VpcStack(app, `${StackPrefix}-VpcStack`, { env }); const dbStack = new DatabaseStack(app, `${StackPrefix}-DatabaseStack`, vpcStack, { env }); const apiStack = new ApiGatewayStack(app, `${StackPrefix}-ApiGatewayStack`, dbStack, vpcStack, { env }); diff --git a/cdk/data_ingestion/src/helpers/helper.py b/cdk/data_ingestion/src/helpers/helper.py index cf5bf8e..fa6ab06 100644 --- a/cdk/data_ingestion/src/helpers/helper.py +++ b/cdk/data_ingestion/src/helpers/helper.py @@ -83,8 +83,6 @@ def store_course_data( port=int(vectorstore_config_dict['port']) ) - print("vector_store",vectorstore) - if vectorstore: # define record manager namespace = f"pgvector/{vectorstore_config_dict['collection_name']}" diff --git a/cdk/data_ingestion/src/main.py b/cdk/data_ingestion/src/main.py index 39c6e6b..6bc2b32 100644 --- a/cdk/data_ingestion/src/main.py +++ b/cdk/data_ingestion/src/main.py @@ -12,54 +12,71 @@ logging.basicConfig(level=logging.INFO) logger = 
logging.getLogger() +# Environment variables DB_SECRET_NAME = os.environ["SM_DB_CREDENTIALS"] REGION = os.environ["REGION"] AILA_DATA_INGESTION_BUCKET = os.environ["BUCKET"] EMBEDDING_BUCKET_NAME = os.environ["EMBEDDING_BUCKET_NAME"] RDS_PROXY_ENDPOINT = os.environ["RDS_PROXY_ENDPOINT"] +EMBEDDING_MODEL_PARAM = os.environ["EMBEDDING_MODEL_PARAM"] + +# AWS Clients +secrets_manager_client = boto3.client("secretsmanager") +ssm_client = boto3.client("ssm") +bedrock_runtime = boto3.client("bedrock-runtime", region_name=REGION) + +# Cached resources +connection = None +db_secret = None +EMBEDDING_MODEL_ID = None def get_secret(): - # secretsmanager client to get db credentials - sm_client = boto3.client("secretsmanager") - response = sm_client.get_secret_value(SecretId=DB_SECRET_NAME)["SecretString"] - secret = json.loads(response) - return secret + global db_secret + if db_secret is None: + try: + response = secrets_manager_client.get_secret_value(SecretId=DB_SECRET_NAME)["SecretString"] + db_secret = json.loads(response) + except Exception as e: + logger.error(f"Error fetching secret: {e}") + raise + return db_secret -def get_parameter(param_name): +def get_parameter(): """ Fetch a parameter value from Systems Manager Parameter Store. 
""" - try: - ssm_client = boto3.client("ssm") - response = ssm_client.get_parameter(Name=param_name, WithDecryption=True) - return response["Parameter"]["Value"] - except Exception as e: - logger.error(f"Error fetching parameter {param_name}: {e}") - raise - -## GET PARAMETER VALUES FOR CONSTANTS -EMBEDDING_MODEL_ID = get_parameter(os.environ["EMBEDDING_MODEL_PARAM"]) + global EMBEDDING_MODEL_ID + if EMBEDDING_MODEL_ID is None: + try: + response = ssm_client.get_parameter(Name=EMBEDDING_MODEL_PARAM, WithDecryption=True) + EMBEDDING_MODEL_ID = response["Parameter"]["Value"] + except Exception as e: + logger.error(f"Error fetching parameter {EMBEDDING_MODEL_PARAM}: {e}") + raise + return EMBEDDING_MODEL_ID def connect_to_db(): - try: - db_secret = get_secret() - connection_params = { - 'dbname': db_secret["dbname"], - 'user': db_secret["username"], - 'password': db_secret["password"], - 'host': RDS_PROXY_ENDPOINT, - 'port': db_secret["port"] - } - connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) - connection = psycopg2.connect(connection_string) - logger.info("Connected to the database!") - return connection - except Exception as e: - logger.error(f"Failed to connect to database: {e}") - if connection: - connection.rollback() - connection.close() - return None + global connection + if connection is None or connection.closed: + try: + secret = get_secret() + connection_params = { + 'dbname': secret["dbname"], + 'user': secret["username"], + 'password': secret["password"], + 'host': RDS_PROXY_ENDPOINT, + 'port': secret["port"] + } + connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) + connection = psycopg2.connect(connection_string) + logger.info("Connected to the database!") + except Exception as e: + logger.error(f"Failed to connect to database: {e}") + if connection: + connection.rollback() + connection.close() + raise + return connection def parse_s3_file_path(file_key): # 
Assuming the file path is of the format: {course_id}/{module_id}/{documents}/{file_name}.{file_type} @@ -140,38 +157,30 @@ def insert_file_into_db(module_id, file_name, file_type, file_path, bucket_name) connection.commit() cur.close() - connection.close() except Exception as e: if cur: cur.close() - if connection: - connection.rollback() - connection.close() + connection.rollback() logger.error(f"Error inserting file {file_name}.{file_type} into database: {e}") raise def update_vectorstore_from_s3(bucket, course_id): - - bedrock_runtime = boto3.client( - service_name="bedrock-runtime", - region_name=REGION - ) embeddings = BedrockEmbeddings( - model_id=EMBEDDING_MODEL_ID, + model_id=get_parameter(), client=bedrock_runtime, region_name=REGION ) - db_secret = get_secret() + secret = get_secret() vectorstore_config_dict = { 'collection_name': f'{course_id}', - 'dbname': db_secret["dbname"], - 'user': db_secret["username"], - 'password': db_secret["password"], + 'dbname': secret["dbname"], + 'user': secret["username"], + 'password': secret["password"], 'host': RDS_PROXY_ENDPOINT, - 'port': db_secret["port"] + 'port': secret["port"] } try: diff --git a/cdk/graphql/schema.graphql b/cdk/graphql/schema.graphql new file mode 100644 index 0000000..cfd4568 --- /dev/null +++ b/cdk/graphql/schema.graphql @@ -0,0 +1,23 @@ +type Subscription { + onNotify(request_id: String!): Notification + @aws_subscribe(mutations: ["sendNotification"]) +} + +type Notification { + message: String + request_id: String +} + +type Mutation { + sendNotification(message: String!, request_id: String!): Notification +} + +type Query { + getNotifications: [Notification] +} + +schema { + query: Query + mutation: Mutation + subscription: Subscription +} \ No newline at end of file diff --git a/cdk/lambda/deleteFile/deleteFile.py b/cdk/lambda/deleteFile/deleteFile.py index 2fd9f6d..153fe3a 100644 --- a/cdk/lambda/deleteFile/deleteFile.py +++ b/cdk/lambda/deleteFile/deleteFile.py @@ -11,33 +11,42 @@ 
DB_SECRET_NAME = os.environ["SM_DB_CREDENTIALS"] RDS_PROXY_ENDPOINT = os.environ["RDS_PROXY_ENDPOINT"] +# AWS Clients +secrets_manager_client = boto3.client('secretsmanager') + +# Global variables for caching +connection = None +db_secret = None + def get_secret(): - # secretsmanager client to get db credentials - sm_client = boto3.client("secretsmanager") - response = sm_client.get_secret_value(SecretId=DB_SECRET_NAME)["SecretString"] - secret = json.loads(response) - return secret + global db_secret + if not db_secret: + response = secrets_manager_client.get_secret_value(SecretId=DB_SECRET_NAME)["SecretString"] + db_secret = json.loads(response) + return db_secret def connect_to_db(): - try: - db_secret = get_secret() - connection_params = { - 'dbname': db_secret["dbname"], - 'user': db_secret["username"], - 'password': db_secret["password"], - 'host': RDS_PROXY_ENDPOINT, - 'port': db_secret["port"] - } - connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) - connection = psycopg2.connect(connection_string) - logger.info("Connected to the database!") - return connection - except Exception as e: - logger.error(f"Failed to connect to database: {e}") - if connection: - connection.rollback() - connection.close() - return None + global connection + if connection is None or connection.closed: + try: + secret = get_secret() + connection_params = { + 'dbname': secret["dbname"], + 'user': secret["username"], + 'password': secret["password"], + 'host': RDS_PROXY_ENDPOINT, + 'port': secret["port"] + } + connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) + connection = psycopg2.connect(connection_string) + logger.info("Connected to the database!") + except Exception as e: + logger.error(f"Failed to connect to database: {e}") + if connection: + connection.rollback() + connection.close() + raise + return connection def delete_file_from_db(module_id, file_name, file_type): connection = 
connect_to_db() @@ -61,13 +70,10 @@ def delete_file_from_db(module_id, file_name, file_type): logger.info(f"Successfully deleted file {file_name}.{file_type} for module {module_id}.") cur.close() - connection.close() except Exception as e: if cur: cur.close() - if connection: - connection.rollback() - connection.close() + connection.rollback() logger.error(f"Error deleting file {file_name}.{file_type} from database: {e}") raise diff --git a/cdk/lambda/deleteLastMessage/deleteLastMessage.py b/cdk/lambda/deleteLastMessage/deleteLastMessage.py index f7746e3..ca3bfec 100644 --- a/cdk/lambda/deleteLastMessage/deleteLastMessage.py +++ b/cdk/lambda/deleteLastMessage/deleteLastMessage.py @@ -7,65 +7,69 @@ logger = logging.getLogger() logger.setLevel(logging.INFO) +# AWS Clients dynamodb_client = boto3.client('dynamodb') +secrets_manager_client = boto3.client('secretsmanager') +ssm_client = boto3.client("ssm") + +# Global variables for caching +connection = None +db_secret = None +TABLE_NAME = None DB_SECRET_NAME = os.environ["SM_DB_CREDENTIALS"] RDS_PROXY_ENDPOINT = os.environ["RDS_PROXY_ENDPOINT"] def get_secret(secret_name, expect_json=True): - try: - # secretsmanager client to get db credentials - sm_client = boto3.client("secretsmanager") - response = sm_client.get_secret_value(SecretId=secret_name)["SecretString"] - - if expect_json: - return json.loads(response) - else: - print(response) - return response - - except json.JSONDecodeError as e: - logger.error(f"Failed to decode JSON for secret {secret_name}: {e}") - raise ValueError(f"Secret {secret_name} is not properly formatted as JSON.") - except Exception as e: - logger.error(f"Error fetching secret {secret_name}: {e}") - raise + global db_secret + if db_secret is None: + try: + response = secrets_manager_client.get_secret_value(SecretId=secret_name)["SecretString"] + db_secret = json.loads(response) if expect_json else response + except json.JSONDecodeError as e: + logger.error(f"Failed to decode JSON for secret: 
{e}") + raise ValueError(f"Secret is not properly formatted as JSON.") + except Exception as e: + logger.error(f"Error fetching secret: {e}") + raise + return db_secret def get_parameter(param_name): """ Fetch a parameter value from Systems Manager Parameter Store. """ - try: - ssm_client = boto3.client("ssm") - response = ssm_client.get_parameter(Name=param_name, WithDecryption=True) - return response["Parameter"]["Value"] - except Exception as e: - logger.error(f"Error fetching parameter {param_name}: {e}") - raise - -## GET PARAMETER VALUES FOR CONSTANTS -TABLE_NAME = get_parameter(os.environ["TABLE_NAME_PARAM"]) + global TABLE_NAME + if TABLE_NAME is None: + try: + response = ssm_client.get_parameter(Name=param_name, WithDecryption=True) + TABLE_NAME = response["Parameter"]["Value"] + except Exception as e: + logger.error(f"Error fetching parameter {param_name}: {e}") + raise + return TABLE_NAME def connect_to_db(): - try: - db_secret = get_secret(DB_SECRET_NAME) - connection_params = { - 'dbname': db_secret["dbname"], - 'user': db_secret["username"], - 'password': db_secret["password"], - 'host': RDS_PROXY_ENDPOINT, - 'port': db_secret["port"] - } - connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) - connection = psycopg2.connect(connection_string) - logger.info("Connected to the database!") - return connection - except Exception as e: - logger.error(f"Failed to connect to database: {e}") - if connection: - connection.rollback() - connection.close() - return None + global connection + if connection is None or connection.closed: + try: + secret = get_secret(DB_SECRET_NAME) + connection_params = { + 'dbname': secret["dbname"], + 'user': secret["username"], + 'password': secret["password"], + 'host': RDS_PROXY_ENDPOINT, + 'port': secret["port"] + } + connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) + connection = psycopg2.connect(connection_string) + logger.info("Connected to 
the database!") + except Exception as e: + logger.error(f"Failed to connect to database: {e}") + if connection: + connection.rollback() + connection.close() + raise + return connection def delete_last_two_db_messages(session_id): connection = connect_to_db() @@ -87,6 +91,7 @@ def delete_last_two_db_messages(session_id): if len(messages) < 2: logger.info(f"Not enough messages to delete for session_id: {session_id}") + cur.close() return False message_ids = tuple([msg[0] for msg in messages]) @@ -98,7 +103,6 @@ def delete_last_two_db_messages(session_id): connection.commit() cur.close() - connection.close() logger.info(f"Successfully deleted the last two messages for session_id: {session_id}") return True @@ -106,14 +110,11 @@ def delete_last_two_db_messages(session_id): logger.error(f"Error deleting messages from database: {e}") if cur: cur.close() - if connection: - connection.rollback() - connection.close() + connection.rollback() return False def lambda_handler(event, context): query_params = event.get("queryStringParameters", {}) - session_id = query_params.get("session_id", "") if not session_id: @@ -131,8 +132,9 @@ def lambda_handler(event, context): try: # Fetch the conversation history from DynamoDB + table_name = get_parameter(os.environ["TABLE_NAME_PARAM"]) response = dynamodb_client.get_item( - TableName=TABLE_NAME, + TableName=table_name, Key={ 'SessionId': { 'S': session_id @@ -175,7 +177,7 @@ def lambda_handler(event, context): # Update the conversation history in DynamoDB dynamodb_client.update_item( - TableName=TABLE_NAME, + TableName=table_name, Key={ 'SessionId': { 'S': session_id diff --git a/cdk/lambda/eventNotification/eventNotification.py b/cdk/lambda/eventNotification/eventNotification.py new file mode 100644 index 0000000..44b9093 --- /dev/null +++ b/cdk/lambda/eventNotification/eventNotification.py @@ -0,0 +1,26 @@ +import json +import os +import boto3 + +def lambda_handler(event, context): + print(f"Event Received: {json.dumps(event)}") + 
try: + # Extract arguments from the AppSync payload + arguments = event.get("arguments", {}) + request_id = arguments.get("request_id", "DefaultRequestId") + message = arguments.get("message", "Default message") + + # Log the extracted values for debugging + print(f"Extracted request_id: {request_id}, message: {message}") + + # Return the values back to AppSync + return { + "request_id": request_id, + "message": message + } + + except Exception as e: + print(f"Error: {str(e)}") + return { + "error": str(e) + } \ No newline at end of file diff --git a/cdk/lambda/getChatLogsFunction/getChatLogsFunction.py b/cdk/lambda/getChatLogsFunction/getChatLogsFunction.py new file mode 100644 index 0000000..079f1bd --- /dev/null +++ b/cdk/lambda/getChatLogsFunction/getChatLogsFunction.py @@ -0,0 +1,113 @@ +import os +import json +import boto3 +from botocore.config import Config +from aws_lambda_powertools import Logger + +logger = Logger() + +# Environment variables +REGION = os.environ["REGION"] +BUCKET = os.environ["BUCKET"] + +# AWS Clients +s3 = boto3.client( + "s3", + endpoint_url=f"https://s3.{REGION}.amazonaws.com", + config=Config(s3={"addressing_style": "virtual"}, region_name=REGION, signature_version="s3v4"), +) + +def list_files_in_s3_prefix(bucket, prefix): + files = [] + continuation_token = None + + # Fetch all objects in the directory, handling pagination + while True: + if continuation_token: + result = s3.list_objects_v2( + Bucket=bucket, + Prefix=prefix, + ContinuationToken=continuation_token + ) + else: + result = s3.list_objects_v2(Bucket=bucket, Prefix=prefix) + + if "Contents" in result: + for obj in result["Contents"]: + files.append(obj["Key"].replace(prefix, "")) + + # Check if there's more data to fetch + if result.get("IsTruncated"): + continuation_token = result.get("NextContinuationToken") + else: + break + + # **Sort files in reverse order (most recent first)** + files.sort(reverse=True) + + return files + +def generate_presigned_url(bucket, key): 
+ try: + return s3.generate_presigned_url( + ClientMethod="get_object", + Params={"Bucket": bucket, "Key": key}, + ExpiresIn=300, + HttpMethod="GET", + ) + except Exception as e: + logger.exception(f"Error generating presigned URL for {key}: {e}") + return None + +@logger.inject_lambda_context +def lambda_handler(event, context): + query_params = event.get("queryStringParameters", {}) + + course_id = query_params.get("course_id", "") + instructor_email = query_params.get("instructor_email", "") + + if not course_id or not instructor_email: + logger.error("Missing required parameters", extra={"course_id": course_id, "instructor_email": instructor_email}) + return { + "statusCode": 400, + "headers": { + "Content-Type": "application/json", + "Access-Control-Allow-Headers": "*", + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "*", + }, + "body": json.dumps("Missing required parameters: course_id or instructor_email"), + } + + try: + log_prefix = f"{course_id}/{instructor_email}/" + + log_files = list_files_in_s3_prefix(BUCKET, log_prefix) + + # Generate presigned URLs for logs + log_files_urls = {file_name: generate_presigned_url(BUCKET, f"{log_prefix}{file_name}") for file_name in log_files} + + logger.info("Presigned URLs generated successfully", extra={"log_files": log_files_urls}) + + return { + "statusCode": 200, + "headers": { + "Content-Type": "application/json", + "Access-Control-Allow-Headers": "*", + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "*", + }, + "body": json.dumps({"log_files": log_files_urls}), + } + except Exception as e: + logger.exception(f"Error generating presigned URLs for chat logs: {e}") + return { + "statusCode": 500, + "headers": { + "Content-Type": "application/json", + "Access-Control-Allow-Headers": "*", + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Methods": "*", + }, + "body": json.dumps("Internal server error"), + } \ No newline at end of file diff --git 
a/cdk/lambda/getFilesFunction/getFilesFunction.py b/cdk/lambda/getFilesFunction/getFilesFunction.py index f139ed6..a4ecfca 100644 --- a/cdk/lambda/getFilesFunction/getFilesFunction.py +++ b/cdk/lambda/getFilesFunction/getFilesFunction.py @@ -7,7 +7,15 @@ logger = Logger() +# Environment variables REGION = os.environ["REGION"] +BUCKET = os.environ["BUCKET"] +DB_SECRET_NAME = os.environ["SM_DB_CREDENTIALS"] +RDS_PROXY_ENDPOINT = os.environ["RDS_PROXY_ENDPOINT"] + +# AWS Clients +secrets_manager_client = boto3.client('secretsmanager') + s3 = boto3.client( "s3", endpoint_url=f"https://s3.{REGION}.amazonaws.com", @@ -15,37 +23,47 @@ s3={"addressing_style": "virtual"}, region_name=REGION, signature_version="s3v4" ), ) -BUCKET = os.environ["BUCKET"] -DB_SECRET_NAME = os.environ["SM_DB_CREDENTIALS"] -RDS_PROXY_ENDPOINT = os.environ["RDS_PROXY_ENDPOINT"] -def get_secret(): - # secretsmanager client to get db credentials - sm_client = boto3.client("secretsmanager") - response = sm_client.get_secret_value(SecretId=DB_SECRET_NAME)["SecretString"] - secret = json.loads(response) - return secret +# Global variables for caching +connection = None +db_secret = None + +def get_secret(secret_name): + global db_secret + if db_secret is None: + try: + response = secrets_manager_client.get_secret_value(SecretId=secret_name)["SecretString"] + db_secret = json.loads(response) + except json.JSONDecodeError as e: + logger.error(f"Failed to decode JSON for secret: {e}") + raise ValueError(f"Secret is not properly formatted as JSON.") + except Exception as e: + logger.error(f"Error fetching secret: {e}") + raise + return db_secret def connect_to_db(): - try: - db_secret = get_secret() - connection_params = { - 'dbname': db_secret["dbname"], - 'user': db_secret["username"], - 'password': db_secret["password"], - 'host': RDS_PROXY_ENDPOINT, - 'port': db_secret["port"] - } - connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) - connection = 
psycopg2.connect(connection_string) - logger.info("Connected to the database!") - return connection - except Exception as e: - logger.error(f"Failed to connect to database: {e}") - if connection: - connection.rollback() - connection.close() - return None + global connection + if connection is None or connection.closed: + try: + secret = get_secret(DB_SECRET_NAME) + connection_params = { + 'dbname': secret["dbname"], + 'user': secret["username"], + 'password': secret["password"], + 'host': RDS_PROXY_ENDPOINT, + 'port': secret["port"] + } + connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) + connection = psycopg2.connect(connection_string) + logger.info("Connected to the database!") + except Exception as e: + logger.error(f"Failed to connect to database: {e}") + if connection: + connection.rollback() + connection.close() + raise + return connection def list_files_in_s3_prefix(bucket, prefix): files = [] @@ -103,7 +121,6 @@ def get_file_metadata_from_db(module_id, file_name, file_type): cur.execute(query, (module_id, file_name, file_type)) result = cur.fetchone() cur.close() - connection.close() if result: return result[0] @@ -115,9 +132,7 @@ def get_file_metadata_from_db(module_id, file_name, file_type): logger.error(f"Error retrieving metadata for {file_name}.{file_type}: {e}") if cur: cur.close() - if connection: - connection.rollback() - connection.close() + connection.rollback() return None @logger.inject_lambda_context diff --git a/cdk/lambda/getPresignedURL/getPresignedURL.py b/cdk/lambda/getPresignedURL/getPresignedURL.py new file mode 100644 index 0000000..180a746 --- /dev/null +++ b/cdk/lambda/getPresignedURL/getPresignedURL.py @@ -0,0 +1,27 @@ +import os +import boto3 +import json + +s3 = boto3.client('s3') +BUCKET_NAME = os.environ['BUCKET_NAME'] + +def lambda_handler(event, context): + try: + body = json.loads(event['body']) + file_key = body['fileKey'] + + url = s3.generate_presigned_url( + 'get_object', + 
Params={'Bucket': BUCKET_NAME, 'Key': file_key}, + ExpiresIn=300 + ) + + return { + 'statusCode': 200, + 'body': json.dumps({'url': url}) + } + except Exception as e: + return { + 'statusCode': 500, + 'body': json.dumps({'error': str(e)}) + } diff --git a/cdk/lambda/initializer/initializer.py b/cdk/lambda/initializer/initializer.py index 574d774..618dc4c 100644 --- a/cdk/lambda/initializer/initializer.py +++ b/cdk/lambda/initializer/initializer.py @@ -143,6 +143,14 @@ def handler(event, context): "engagement_details" text ); + CREATE TABLE IF NOT EXISTS "chatlogs_notifications" ( + "id" uuid PRIMARY KEY DEFAULT (uuid_generate_v4()), + "course_id" uuid NOT NULL, + "instructor_email" varchar NOT NULL, + "request_id" uuid NOT NULL, + "completion" boolean DEFAULT FALSE + ); + ALTER TABLE "User_Engagement_Log" ADD FOREIGN KEY ("enrolment_id") REFERENCES "Enrolments" ("enrolment_id") ON DELETE CASCADE ON UPDATE CASCADE; ALTER TABLE "User_Engagement_Log" ADD FOREIGN KEY ("user_id") REFERENCES "Users" ("user_id") ON DELETE CASCADE ON UPDATE CASCADE; ALTER TABLE "User_Engagement_Log" ADD FOREIGN KEY ("course_id") REFERENCES "Courses" ("course_id") ON DELETE CASCADE ON UPDATE CASCADE; @@ -164,6 +172,9 @@ def handler(event, context): ALTER TABLE "Messages" ADD FOREIGN KEY ("session_id") REFERENCES "Sessions" ("session_id") ON DELETE CASCADE ON UPDATE CASCADE; + ALTER TABLE "chatlogs_notifications" ADD FOREIGN KEY ("course_id") REFERENCES "Courses" ("course_id") ON DELETE CASCADE ON UPDATE CASCADE; + ALTER TABLE "chatlogs_notifications" ADD FOREIGN KEY ("instructor_email") REFERENCES "Users" ("user_email") ON DELETE CASCADE ON UPDATE CASCADE; + DO $$ BEGIN IF NOT EXISTS ( diff --git a/cdk/lambda/lib/appsync.js b/cdk/lambda/lib/appsync.js new file mode 100644 index 0000000..2bafd60 --- /dev/null +++ b/cdk/lambda/lib/appsync.js @@ -0,0 +1,5 @@ +exports.handler = async (event) => { + return { + isAuthorized: true, + }; + }; \ No newline at end of file diff --git 
a/cdk/lambda/lib/instructorFunction.js b/cdk/lambda/lib/instructorFunction.js index 2f6ab86..77c5947 100644 --- a/cdk/lambda/lib/instructorFunction.js +++ b/cdk/lambda/lib/instructorFunction.js @@ -1183,62 +1183,72 @@ exports.handler = async (event) => { }); } break; - case "GET /instructor/course_messages": + case "GET /instructor/check_notifications_status": + if ( + event.queryStringParameters != null && + event.queryStringParameters.instructor_email && + event.queryStringParameters.course_id + ) { + const { instructor_email, course_id } = event.queryStringParameters; + + try { + // Query to check the completion status in the chatlogs_notifications table + const notificationStatus = await sqlConnection` + SELECT completion, request_id + FROM "chatlogs_notifications" + WHERE instructor_email = ${instructor_email} AND course_id = ${course_id} + LIMIT 1; + `; + + // if exists, true or false, button should not be enabled + if (notificationStatus.length > 0) { + response.statusCode = 200; + response.body = JSON.stringify({ + isEnabled: false, + completionStatus: notificationStatus[0].completion, + requestId: notificationStatus[0].request_id + }); + } else { + response.statusCode = 200; + response.body = JSON.stringify({ + isEnabled: true, + completionStatus: null, + requestId: null + }); + } + } catch (err) { + response.statusCode = 500; + console.error(err); + response.body = JSON.stringify({ error: "Internal server error" }); + } + } else { + response.statusCode = 400; + response.body = JSON.stringify({ error: "instructor_email and course_id are required." 
}); + } + break; + case "DELETE /instructor/remove_completed_notification": if ( event.queryStringParameters != null && event.queryStringParameters.instructor_email && event.queryStringParameters.course_id ) { const { instructor_email, course_id } = event.queryStringParameters; - + try { - // Get the instructor user_id - const instructorResult = await sqlConnection` - SELECT user_id FROM "Users" WHERE user_email = ${instructor_email} LIMIT 1; - `; - const instructorId = instructorResult[0]?.user_id; - if (!instructorId) { + // Delete the row from the chatlogs_notifications table + const deleteResult = await sqlConnection` + DELETE FROM "chatlogs_notifications" + WHERE instructor_email = ${instructor_email} AND course_id = ${course_id} + RETURNING *; + `; + + if (deleteResult.length > 0) { + response.statusCode = 200; + response.body = JSON.stringify({ message: "Notification removed successfully." }); + } else { response.statusCode = 404; - response.body = JSON.stringify({ error: "Instructor not found" }); - break; + response.body = JSON.stringify({ error: "No notification found for the given instructor and course." 
}); } - // Query to fetch messages, session, and other related data - const data = await sqlConnection` - SELECT - u.user_id, - cm.module_name, - cc.concept_name, -- Added concept name - s.session_id, - m.message_content AS message, - m.student_sent AS "sent by student", -- Renamed to "sent by student" - CASE - WHEN sm.module_score = 100 THEN 'complete' - ELSE 'incomplete' - END AS competency_status, - m.time_sent AS timestamp - FROM - "Messages" m - JOIN - "Sessions" s ON m.session_id = s.session_id - JOIN - "Student_Modules" sm ON s.student_module_id = sm.student_module_id - JOIN - "Course_Modules" cm ON sm.course_module_id = cm.module_id - JOIN - "Course_Concepts" cc ON cm.concept_id = cc.concept_id - JOIN - "Enrolments" e ON sm.enrolment_id = e.enrolment_id - JOIN - "Users" u ON e.user_id = u.user_id - WHERE - cc.course_id = ${course_id} - ORDER BY - u.user_id, cm.module_name, s.session_id, m.time_sent; - - `; - - response.statusCode = 200; - response.body = JSON.stringify(data); } catch (err) { response.statusCode = 500; console.error(err); @@ -1246,10 +1256,7 @@ exports.handler = async (event) => { } } else { response.statusCode = 400; - response.body = JSON.stringify({ - error: - "instructor_email, student_email, and course_id are required", - }); + response.body = JSON.stringify({ error: "instructor_email and course_id are required." 
}); } break; default: diff --git a/cdk/lambda/lib/sqsFunction.js b/cdk/lambda/lib/sqsFunction.js new file mode 100644 index 0000000..ed0ef02 --- /dev/null +++ b/cdk/lambda/lib/sqsFunction.js @@ -0,0 +1,77 @@ +const { SQSClient, SendMessageCommand } = require("@aws-sdk/client-sqs"); +const { initializeConnection } = require("./lib.js"); + +const sqsClient = new SQSClient({ region: process.env.AWS_REGION }); +const { SM_DB_CREDENTIALS, RDS_PROXY_ENDPOINT } = process.env; +let sqlConnection = global.sqlConnection; + +exports.handler = async (event) => { + try { + // Parse the incoming event + console.log("Parsing instructor_email, course_id, and request_id"); + const { instructor_email, course_id, request_id } = JSON.parse(event.body); + + // Validate input + if (!instructor_email || !course_id || !request_id) { + return { + statusCode: 400, + headers: { + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key", + "Access-Control-Allow-Methods": "OPTIONS,POST", + }, + body: JSON.stringify({ error: "Missing instructor_email, course_id, or request_id" }), + }; + } + + // Initialize database connection if not already established + if (!sqlConnection) { + await initializeConnection(SM_DB_CREDENTIALS, RDS_PROXY_ENDPOINT); + sqlConnection = global.sqlConnection; + } + + // Insert the record into the chatlogs_notifications table + console.log("Inserting record into the chatlogs_notifications table with completion status FALSE"); + await sqlConnection` + INSERT INTO "chatlogs_notifications" ("course_id", "instructor_email", "request_id", "completion") + VALUES (${course_id}, ${instructor_email}, ${request_id}, false) + ON CONFLICT DO NOTHING; + `; + + // Prepare the SQS message + const params = { + QueueUrl: process.env.SQS_QUEUE_URL, + MessageBody: JSON.stringify({ instructor_email, course_id, request_id }), + MessageGroupId: course_id, // FIFO requires group ID + MessageDeduplicationId: 
`${instructor_email}-${course_id}-${request_id}`, // Deduplication ID + }; + + // Send the message to SQS + console.log("Sending message to SQS"); + const command = new SendMessageCommand(params); + await sqsClient.send(command); + console.log("Message sent to SQS"); + + // Return success response + return { + statusCode: 200, + headers: { + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key", + "Access-Control-Allow-Methods": "OPTIONS,POST", + }, + body: JSON.stringify({ message: "Job submitted and notification logged successfully" }), + }; + } catch (error) { + console.error("Error processing SQS function:", error); + return { + statusCode: 500, + headers: { + "Access-Control-Allow-Origin": "*", + "Access-Control-Allow-Headers": "Content-Type,X-Amz-Date,Authorization,X-Api-Key", + "Access-Control-Allow-Methods": "OPTIONS,POST", + }, + body: JSON.stringify({ error: "Internal Server Error" }), + }; + } +}; \ No newline at end of file diff --git a/cdk/lib/amplify-stack.ts b/cdk/lib/amplify-stack.ts index 431ad0b..d14a626 100644 --- a/cdk/lib/amplify-stack.ts +++ b/cdk/lib/amplify-stack.ts @@ -2,6 +2,7 @@ import { App, BasicAuth, GitHubSourceCodeProvider, + RedirectStatus, // Import RedirectStatus } from "@aws-cdk/aws-amplify-alpha"; import * as cdk from "aws-cdk-lib"; import { BuildSpec } from "aws-cdk-lib/aws-codebuild"; @@ -44,10 +45,6 @@ export class AmplifyStack extends cdk.Stack { cache: paths: - 'node_modules/**/*' - redirects: - - source: - target: / - status: 404 `); const username = cdk.aws_ssm.StringParameter.valueForStringParameter( @@ -73,10 +70,17 @@ export class AmplifyStack extends cdk.Stack { VITE_COGNITO_USER_POOL_CLIENT_ID: apiStack.getUserPoolClientId(), VITE_API_ENDPOINT: apiStack.getEndpointUrl(), VITE_IDENTITY_POOL_ID: apiStack.getIdentityPoolId(), + VITE_GRAPHQL_WS_URL: apiStack.getEventApiUrl(), }, buildSpec: BuildSpec.fromObjectToYaml(amplifyYaml), }); + 
amplifyApp.addCustomRule({ + source: "", + target: "/", + status: RedirectStatus.NOT_FOUND_REWRITE, + }); + amplifyApp.addBranch("main"); } } \ No newline at end of file diff --git a/cdk/lib/api-gateway-stack.ts b/cdk/lib/api-gateway-stack.ts index 7b62aa8..9cf34bd 100644 --- a/cdk/lib/api-gateway-stack.ts +++ b/cdk/lib/api-gateway-stack.ts @@ -5,6 +5,7 @@ import * as lambdaEventSources from "aws-cdk-lib/aws-lambda-event-sources"; import * as iam from "aws-cdk-lib/aws-iam"; import { Construct } from "constructs"; import { Duration } from "aws-cdk-lib"; +import * as wafv2 from "aws-cdk-lib/aws-wafv2"; import { Architecture, Code, @@ -24,6 +25,9 @@ import * as s3 from "aws-cdk-lib/aws-s3"; import * as bedrock from "aws-cdk-lib/aws-bedrock"; import * as secretsmanager from "aws-cdk-lib/aws-secretsmanager"; import * as ssm from "aws-cdk-lib/aws-ssm"; +import * as sqs from "aws-cdk-lib/aws-sqs"; +import * as appsync from "aws-cdk-lib/aws-appsync"; + export class ApiGatewayStack extends cdk.Stack { private readonly api: apigateway.SpecRestApi; @@ -31,6 +35,7 @@ export class ApiGatewayStack extends cdk.Stack { public readonly userPool: cognito.UserPool; public readonly identityPool: cognito.CfnIdentityPool; private readonly layerList: { [key: string]: LayerVersion }; + private eventApi: appsync.GraphqlApi; public readonly stageARN_APIGW: string; public readonly apiGW_basedURL: string; public readonly secret: secretsmanager.ISecret; @@ -38,6 +43,7 @@ export class ApiGatewayStack extends cdk.Stack { public getUserPoolId = () => this.userPool.userPoolId; public getUserPoolClientId = () => this.appClient.userPoolClientId; public getIdentityPoolId = () => this.identityPool.ref; + public getEventApiUrl = () => this.eventApi.graphqlUrl; public addLayer = (name: string, layer: LayerVersion) => (this.layerList[name] = layer); public getLayers = () => this.layerList; @@ -117,6 +123,22 @@ export class ApiGatewayStack extends cdk.Stack { this.layerList["postgres"] = postgres; 
this.layerList["jwt"] = jwt; + // Create FIFO SQS Queue for jobs that get classroom chatlogs for a course + const messagesQueue = new sqs.Queue(this, `${id}-MessagesQueue`, { + queueName: `${id}-messages-queue.fifo`, + fifo: true, + removalPolicy: cdk.RemovalPolicy.DESTROY, + visibilityTimeout: Duration.seconds(300), + }); + + messagesQueue.addToResourcePolicy( + new iam.PolicyStatement({ + actions: ["sqs:SendMessage"], + principals: [new iam.ServicePrincipal("lambda.amazonaws.com")], + resources: [messagesQueue.queueArn], + }) + ); + // Create Cognito user pool /** @@ -668,7 +690,7 @@ export class ApiGatewayStack extends cdk.Stack { "secretsmanager:PutSecretValue", ], resources: [ - `arn:aws:secretsmanager:${this.region}:${this.account}:secret:AILA/*`, + `arn:aws:secretsmanager:${this.region}:${this.account}:secret:*`, ], }) ); @@ -962,6 +984,7 @@ export class ApiGatewayStack extends cdk.Stack { "dynamodb:DescribeTable", "dynamodb:PutItem", "dynamodb:GetItem", + "dynamodb:UpdateItem", ], resources: [`arn:aws:dynamodb:${this.region}:${this.account}:table/*`], }) @@ -1344,5 +1367,314 @@ export class ApiGatewayStack extends cdk.Stack { resources: [tableNameParameter.parameterArn], }) ); + + ////////////////////////////// + ////////////////////////////// + + const authHandler = new lambda.Function(this, `${id}-AuthHandler`, { + runtime: lambda.Runtime.NODEJS_20_X, + code: lambda.Code.fromAsset("lambda/lib"), + handler: "appsync.handler", + functionName: `${id}-AuthHandler`, + }); + + // Create AppSync API + this.eventApi = new appsync.GraphqlApi(this, + `${id}-EventApi`, { + name: `${id}-EventApi`, + definition: appsync.Definition.fromFile("./graphql/schema.graphql"), + authorizationConfig: { + defaultAuthorization: { + authorizationType: appsync.AuthorizationType.LAMBDA, + lambdaAuthorizerConfig: { + handler: authHandler, + }, + }, + }, + xrayEnabled: true, + }); + + const notificationFunction = new lambda.Function( + this, + `${id}-NotificationFunction`, + { + 
runtime: lambda.Runtime.PYTHON_3_9, + code: lambda.Code.fromAsset("lambda/eventNotification"), + handler: "eventNotification.lambda_handler", + environment: { + APPSYNC_API_URL: this.eventApi.graphqlUrl, + APPSYNC_API_ID: this.eventApi.apiId, + REGION: this.region, + }, + functionName: `${id}-NotificationFunction`, + timeout: cdk.Duration.seconds(300), + memorySize: 128, + vpc: vpcStack.vpc, + role: lambdaRole, + }); + + notificationFunction.addToRolePolicy( + new iam.PolicyStatement({ + effect: iam.Effect.ALLOW, + actions: ['appsync:GraphQL'], + resources: [`arn:aws:appsync:${this.region}:${this.account}:apis/${this.eventApi.apiId}/*`], + }) + ); + + notificationFunction.addPermission("AppSyncInvokePermission", { + principal: new iam.ServicePrincipal("appsync.amazonaws.com"), + action: "lambda:InvokeFunction", + sourceArn: `arn:aws:appsync:${this.region}:${this.account}:apis/${this.eventApi.apiId}/*`, + }); + + const notificationLambdaDataSource = this.eventApi.addLambdaDataSource( + "NotificationLambdaDataSource", + notificationFunction + ); + + notificationLambdaDataSource.createResolver("ResolverEventApi", { + typeName: "Mutation", + fieldName: "sendNotification", + requestMappingTemplate: appsync.MappingTemplate.lambdaRequest(), + responseMappingTemplate: appsync.MappingTemplate.lambdaResult(), + }); + + // Add permission to allow main.py Lambda to invoke eventNotification Lambda + notificationFunction.grantInvoke(new iam.ServicePrincipal("lambda.amazonaws.com")); + + // Override the Logical ID of the Lambdas Function to get ARN in OpenAPI + const cfnNotificationFunction = notificationFunction.node + .defaultChild as lambda.CfnFunction; + cfnNotificationFunction.overrideLogicalId("NotificationFunction"); + + /** + * + * Create a Lambda function that populates SQS with parameters to start new job + */ + const sqsFunction = new lambda.Function(this, `${id}-sqsFunction`, { + runtime: lambda.Runtime.NODEJS_20_X, + code: lambda.Code.fromAsset("lambda/lib"), + 
handler: "sqsFunction.handler", + timeout: Duration.seconds(300), + environment: { + SQS_QUEUE_URL: messagesQueue.queueUrl, + SM_DB_CREDENTIALS: db.secretPathUser.secretName, + RDS_PROXY_ENDPOINT: db.rdsProxyEndpoint, + }, + vpc: db.dbInstance.vpc, + functionName: `${id}-sqsFunction`, + memorySize: 128, + layers: [postgres], + role: coglambdaRole, + }); + + messagesQueue.grantSendMessages(sqsFunction); + + sqsFunction.addToRolePolicy( + new iam.PolicyStatement({ + actions: ["sqs:SendMessage"], + resources: [messagesQueue.queueArn], + effect: iam.Effect.ALLOW, + }) + ); + + // Override the Logical ID of the Lambda Function to get ARN in OpenAPI + const cfnSqsFunction = sqsFunction.node + .defaultChild as lambda.CfnFunction; + cfnSqsFunction.overrideLogicalId("sqsFunction"); + + // Add the permission to the Lambda function's policy to allow API Gateway access + sqsFunction.addPermission("AllowApiGatewayInvoke", { + principal: new iam.ServicePrincipal("apigateway.amazonaws.com"), + action: "lambda:InvokeFunction", + sourceArn: `arn:aws:execute-api:${this.region}:${this.account}:${this.api.restApiId}/*/*/instructor*`, + }); + + const chatlogsBucket = new s3.Bucket( + this, + `${id}-chatlogsBucket`, + { + blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, + cors: [ + { + allowedHeaders: ["*"], + allowedMethods: [ + s3.HttpMethods.GET, + s3.HttpMethods.PUT, + s3.HttpMethods.HEAD, + s3.HttpMethods.POST, + s3.HttpMethods.DELETE, + ], + allowedOrigins: ["*"], + }, + ], + // When deleting the stack, need to empty the Bucket and delete it manually + removalPolicy: cdk.RemovalPolicy.RETAIN, + enforceSSL: true, + } + ); + + /** + * + * Create a Lambda function that gets triggered when SQS has new parameters + */ + const sqsTrigger = new lambda.DockerImageFunction(this, `${id}-SQSTriggerDockerFunc`, { + code: lambda.DockerImageCode.fromImageAsset("./sqsTrigger"), + memorySize: 512, + timeout: cdk.Duration.seconds(300), + vpc: vpcStack.vpc, // Pass the VPC + functionName: 
`${id}-SQSTriggerDockerFunc`, + environment: { + SM_DB_CREDENTIALS: db.secretPathUser.secretName, + RDS_PROXY_ENDPOINT: db.rdsProxyEndpoint, + CHATLOGS_BUCKET: chatlogsBucket.bucketName, + APPSYNC_API_URL: this.eventApi.graphqlUrl, + REGION: this.region, + }, + }); + + sqsTrigger.addEventSource( + new lambdaEventSources.SqsEventSource(messagesQueue, { + batchSize: 1, // Process messages one at a time + }) + ); + + // Override the Logical ID of the Lambda Function to get ARN in OpenAPI + const cfnSqsTrigger = sqsTrigger.node + .defaultChild as lambda.CfnFunction; + cfnSqsTrigger.overrideLogicalId( + "SQSTriggerDockerFunc" + ); + + chatlogsBucket.grantRead(sqsTrigger); + + // Add ListBucket permission explicitly + sqsTrigger.addToRolePolicy( + new iam.PolicyStatement({ + effect: iam.Effect.ALLOW, + actions: ["s3:ListBucket"], + resources: [chatlogsBucket.bucketArn], // Access to the specific bucket + }) + ); + + sqsTrigger.addToRolePolicy( + new iam.PolicyStatement({ + effect: iam.Effect.ALLOW, + actions: [ + "s3:PutObject", + "s3:GetObject", + "s3:DeleteObject", + "s3:HeadObject", + ], + resources: [ + `arn:aws:s3:::${chatlogsBucket.bucketName}/*`, // Grant access to all objects within this bucket + ], + }) + ); + + // Grant access to Secret Manager + sqsTrigger.addToRolePolicy( + new iam.PolicyStatement({ + effect: iam.Effect.ALLOW, + actions: [ + //Secrets Manager + "secretsmanager:GetSecretValue", + ], + resources: [ + `arn:aws:secretsmanager:${this.region}:${this.account}:secret:*`, + ], + }) + ); + + /** + * + * Create Lambda function that will return all the chatlog file names with their respective presigned URLs for a specified course and instructor + */ + const getChatLogsFunction = new lambda.Function(this, `${id}-GetChatLogsFunction`, { + runtime: lambda.Runtime.PYTHON_3_9, + code: lambda.Code.fromAsset("lambda/getChatLogsFunction"), + handler: "getChatLogsFunction.lambda_handler", + timeout: Duration.seconds(300), + memorySize: 128, + vpc: vpcStack.vpc, + 
environment: { + BUCKET: chatlogsBucket.bucketName, + REGION: this.region, + }, + functionName: `${id}-GetChatLogsFunction`, + layers: [psycopgLayer, powertoolsLayer], + }); + + // Override the Logical ID of the Lambda Function to get ARN in OpenAPI + const cfnGetChatLogsFunction = getChatLogsFunction.node + .defaultChild as lambda.CfnFunction; + cfnGetChatLogsFunction.overrideLogicalId("GetChatLogsFunction"); + + // Grant the Lambda function read-only permissions to the S3 bucket + chatlogsBucket.grantRead(getChatLogsFunction); + + // Add the permission to the Lambda function's policy to allow API Gateway access + getChatLogsFunction.addPermission("AllowApiGatewayInvoke", { + principal: new iam.ServicePrincipal("apigateway.amazonaws.com"), + action: "lambda:InvokeFunction", + sourceArn: `arn:aws:execute-api:${this.region}:${this.account}:${this.api.restApiId}/*/*/instructor*`, + }); + + // Waf Firewall + const waf = new wafv2.CfnWebACL(this, `${id}-waf`, { + description: "AILA waf", + scope: "REGIONAL", + defaultAction: { allow: {} }, + visibilityConfig: { + sampledRequestsEnabled: true, + cloudWatchMetricsEnabled: true, + metricName: "ailearningassistant-firewall", + }, + rules: [ + { + name: "AWS-AWSManagedRulesCommonRuleSet", + priority: 1, + statement: { + managedRuleGroupStatement: { + vendorName: "AWS", + name: "AWSManagedRulesCommonRuleSet", + }, + }, + overrideAction: { none: {} }, + visibilityConfig: { + sampledRequestsEnabled: true, + cloudWatchMetricsEnabled: true, + metricName: "AWS-AWSManagedRulesCommonRuleSet", + }, + }, + { + name: "LimitRequests1000", + priority: 2, + action: { + block: {}, + }, + statement: { + rateBasedStatement: { + limit: 1000, + aggregateKeyType: "IP", + }, + }, + visibilityConfig: { + sampledRequestsEnabled: true, + cloudWatchMetricsEnabled: true, + metricName: "LimitRequests1000", + }, + }, + ], + }); + const wafAssociation = new wafv2.CfnWebACLAssociation( + this, + `${id}-waf-association`, + { + resourceArn: 
`arn:aws:apigateway:${this.region}::/restapis/${this.api.restApiId}/stages/${this.api.deploymentStage.stageName}`, + webAclArn: waf.attrArn, + } + ); + } } diff --git a/cdk/lib/database-stack.ts b/cdk/lib/database-stack.ts index e6ed952..d7789bf 100644 --- a/cdk/lib/database-stack.ts +++ b/cdk/lib/database-stack.ts @@ -100,12 +100,27 @@ export class DatabaseStack extends Stack { parameterGroup: parameterGroup }); + // Add CIDR ranges of private subnets to inbound rules of RDS + const dbSecurityGroup = this.dbInstance.connections.securityGroups[0]; + if (vpcStack.privateSubnetsCidrStrings && vpcStack.privateSubnetsCidrStrings.length > 0) { + vpcStack.privateSubnetsCidrStrings.forEach((cidr) => { + dbSecurityGroup.addIngressRule( + ec2.Peer.ipv4(cidr), + ec2.Port.tcp(5432), + `Allow PostgreSQL traffic from private subnet CIDR range ${cidr}` + ); + }); + } else { + console.log("Deploying with new VPC. No need to add private subnet CIDR ranges to inbound rules of RDS."); + } + + // Add CIDR ranges of public subnets to inbound rules of RDS this.dbInstance.connections.securityGroups.forEach(function (securityGroup) { - // 10.0.0.0/16 match the cidr range in vpc stack + // Allow Postgres access in VPC securityGroup.addIngressRule( - ec2.Peer.ipv4(vpcStack.vpcCidrString), - ec2.Port.tcp(5432), - "Postgres Ingress" + ec2.Peer.ipv4(vpcStack.vpcCidrString), + ec2.Port.tcp(5432), + "Allow PostgreSQL traffic from public subnets" ); }); diff --git a/cdk/lib/dbFlow-stack.ts b/cdk/lib/dbFlow-stack.ts index 9859ff0..f4d9bb6 100644 --- a/cdk/lib/dbFlow-stack.ts +++ b/cdk/lib/dbFlow-stack.ts @@ -34,7 +34,7 @@ export class DBFlowStack extends Stack { "secretsmanager:PutSecretValue" ], resources: [ - `arn:aws:secretsmanager:${this.region}:${this.account}:secret:AILA/*`, + `arn:aws:secretsmanager:${this.region}:${this.account}:secret:*`, ], }) ); diff --git a/cdk/lib/vpc-stack.ts b/cdk/lib/vpc-stack.ts index 87cb754..71b1ffb 100644 --- a/cdk/lib/vpc-stack.ts +++ b/cdk/lib/vpc-stack.ts 
@@ -7,6 +7,8 @@ import { Fn } from 'aws-cdk-lib'; export class VpcStack extends Stack { public readonly vpc: ec2.Vpc; public readonly vpcCidrString: string; + public readonly privateSubnetsCidrStrings: string[]; + constructor(scope: Construct, id: string, props?: StackProps) { super(scope, id, props); @@ -51,6 +53,13 @@ export class VpcStack extends Stack { vpcCidrBlock: Fn.importValue(`${AWSControlTowerStackSet}-VPCCIDR`), }) as ec2.Vpc; + // Extract CIDR ranges from the private subnets + this.privateSubnetsCidrStrings = [ + Fn.importValue(`${AWSControlTowerStackSet}-PrivateSubnet1ACIDR`), + Fn.importValue(`${AWSControlTowerStackSet}-PrivateSubnet2ACIDR`), + Fn.importValue(`${AWSControlTowerStackSet}-PrivateSubnet3ACIDR`), + ]; + // Create a public subnet const publicSubnet = new ec2.Subnet(this, `PublicSubnet`, { vpcId: this.vpc.vpcId, @@ -66,28 +75,20 @@ export class VpcStack extends Stack { internetGatewayId: internetGateway.ref, }); - // Create a route table for the public subnet - const publicRouteTable = new ec2.CfnRouteTable(this, `PublicRouteTable`, { - vpcId: this.vpc.vpcId, - }); - - // Associate the public subnet with the new route table - new ec2.CfnSubnetRouteTableAssociation(this, `PublicSubnetAssociation`, { - subnetId: publicSubnet.subnetId, - routeTableId: publicRouteTable.ref, - }); - // Add a NAT Gateway in the public subnet const natGateway = new ec2.CfnNatGateway(this, `NatGateway`, { subnetId: publicSubnet.subnetId, allocationId: new ec2.CfnEIP(this, 'EIP', {}).attrAllocationId, }); - // Create a route to the Internet Gateway + // Use the route table associated with the public subnet + const publicRouteTableId = publicSubnet.routeTable.routeTableId; + + // Add a route to the Internet Gateway in the existing public route table new ec2.CfnRoute(this, `PublicRoute`, { - routeTableId: publicRouteTable.ref, - destinationCidrBlock: '0.0.0.0/0', - gatewayId: internetGateway.ref, + routeTableId: publicRouteTableId, + destinationCidrBlock: 
'0.0.0.0/0', + gatewayId: internetGateway.ref, }); // Update route table for private subnets diff --git a/cdk/package-lock.json b/cdk/package-lock.json index 2c67bbf..692797c 100644 --- a/cdk/package-lock.json +++ b/cdk/package-lock.json @@ -9,9 +9,12 @@ "version": "0.1.0", "dependencies": { "@aws-cdk/aws-amplify-alpha": "^2.146.0-alpha.0", + "@aws-cdk/aws-appsync-alpha": "^2.59.0-alpha.0", "@aws-cdk/custom-resources": "^1.204.0", "aws-cdk-lib": "^2.146.0", + "aws-sdk": "^2.1692.0", "constructs": "^10.0.0", + "postgres": "^3.4.5", "source-map-support": "^0.5.21", "yaml": "^2.4.5" }, @@ -126,6 +129,19 @@ "node": ">= 16.14.0" } }, + "node_modules/@aws-cdk/aws-appsync-alpha": { + "version": "2.59.0-alpha.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/aws-appsync-alpha/-/aws-appsync-alpha-2.59.0-alpha.0.tgz", + "integrity": "sha512-6Q1SpTw7F3TTrip7h7DHOqeCCPBeg0YveZZoWElHgYyi1LyVdMOvK8f2gxmrG9Z0mUpSeo2/FKkxUUC/+SJCLw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "aws-cdk-lib": "^2.59.0", + "constructs": "^10.0.0" + } + }, "node_modules/@aws-cdk/aws-autoscaling-common": { "version": "1.204.0", "resolved": "https://registry.npmjs.org/@aws-cdk/aws-autoscaling-common/-/aws-autoscaling-common-1.204.0.tgz", @@ -2247,6 +2263,21 @@ "sprintf-js": "~1.0.2" } }, + "node_modules/available-typed-arrays": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", + "integrity": "sha512-wvUjBtSGN7+7SjNpq/9M2Tg350UZD3q62IFZLbRAR1bSMlCo1ZaeW+BJ+D090e4hIIZLBcTDWe4Mh4jvUDajzQ==", + "license": "MIT", + "dependencies": { + "possible-typed-array-names": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/aws-cdk": { "version": "2.141.0", "resolved": "https://registry.npmjs.org/aws-cdk/-/aws-cdk-2.141.0.tgz", @@ -2604,6 +2635,28 @@ "node": ">= 6" } }, + "node_modules/aws-sdk": { + 
"version": "2.1692.0", + "resolved": "https://registry.npmjs.org/aws-sdk/-/aws-sdk-2.1692.0.tgz", + "integrity": "sha512-x511uiJ/57FIsbgUe5csJ13k3uzu25uWQE+XqfBis/sB0SFoiElJWXRkgEAUh0U6n40eT3ay5Ue4oPkRMu1LYw==", + "hasInstallScript": true, + "license": "Apache-2.0", + "dependencies": { + "buffer": "4.9.2", + "events": "1.1.1", + "ieee754": "1.1.13", + "jmespath": "0.16.0", + "querystring": "0.2.0", + "sax": "1.2.1", + "url": "0.10.3", + "util": "^0.12.4", + "uuid": "8.0.0", + "xml2js": "0.6.2" + }, + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/babel-jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", @@ -2717,6 +2770,26 @@ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", "dev": true }, + "node_modules/base64-js": { + "version": "1.5.1", + "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", + "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", @@ -2792,11 +2865,69 @@ "node-int64": "^0.4.0" } }, + "node_modules/buffer": { + "version": "4.9.2", + "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.2.tgz", + "integrity": "sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg==", + "license": "MIT", + "dependencies": { + "base64-js": "^1.0.2", + "ieee754": "^1.1.4", + "isarray": "^1.0.0" + } + }, "node_modules/buffer-from": { "version": "1.1.2", "resolved": 
"https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz", "integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ==" }, + "node_modules/call-bind": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/call-bind/-/call-bind-1.0.8.tgz", + "integrity": "sha512-oKlSFMcMwpUg2ednkhQ454wfWiU/ul3CkJe/PEHcTKuiX6RpbehUiFMXu13HalGZxfUwCQzZG747YXBn1im9ww==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.1.tgz", + "integrity": "sha512-BhYE+WDaywFg2TBWYNXAE+8B1ATnThNBqXHP5nQu0jWJdVvY2hvkpyB3qOmtmDePiS5/BDQ8wASEWGMWRG148g==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.3.tgz", + "integrity": "sha512-YTd+6wGlNlPxSuri7Y6X8tY2dmm12UMH66RpKMhiX6rsk5wXXnYgbUcOt8kiS31/AjfoTOvCsE+w8nZQLQnzHA==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "get-intrinsic": "^1.2.6" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/callsites": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", @@ -3030,6 +3161,23 @@ "node": ">=0.10.0" } }, + "node_modules/define-data-property": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/define-data-property/-/define-data-property-1.1.4.tgz", + "integrity": 
"sha512-rBMvIzlpA8v6E+SJZoo++HAYqsLrkg7MSfIinMPFhmkorw7X+dOXVJQs+QT69zGkzMyfDnIMN2Wid1+NbL3T+A==", + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -3057,6 +3205,20 @@ "node": "^14.15.0 || ^16.10.0 || >=18.0.0" } }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/electron-to-chromium": { "version": "1.4.806", "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.806.tgz", @@ -3090,6 +3252,36 @@ "is-arrayish": "^0.2.1" } }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-object-atoms": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.0.0.tgz", + "integrity": 
"sha512-MZ4iQ6JwHOBQjahnjwaC1ZtIBH+2ohjamzAO3oaHcXYup7qxjF2fixyH+Q71voWHeOkI2q/TnJao/KfXYIZWbw==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/escalade": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", @@ -3121,6 +3313,15 @@ "node": ">=4" } }, + "node_modules/events": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/events/-/events-1.1.1.tgz", + "integrity": "sha512-kEcvvCBByWXGnZy6JUlgAp2gBIUjfCAV6P6TgT1/aaQKcmuAEC4OZTV1I4EWQLz2gxZw76atuVyvHhTxvi0Flw==", + "license": "MIT", + "engines": { + "node": ">=0.4.x" + } + }, "node_modules/execa": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/execa/-/execa-5.1.1.tgz", @@ -3209,6 +3410,15 @@ "node": ">=8" } }, + "node_modules/for-each": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", + "integrity": "sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==", + "license": "MIT", + "dependencies": { + "is-callable": "^1.1.3" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -3233,7 +3443,6 @@ "version": "1.1.2", "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", - "dev": true, "funding": { "url": "https://github.com/sponsors/ljharb" } @@ -3256,6 +3465,30 @@ "node": "6.* || 8.* || >= 10.*" } }, + "node_modules/get-intrinsic": { + "version": "1.2.6", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.6.tgz", + "integrity": "sha512-qxsEs+9A+u85HhllWJJFicJfPDhRmjzoYdl64aMWW9yRIJmSyxdn8IEkuIM530/7T+lv0TIHd8L6Q/ra0tEoeA==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "dunder-proto": "^1.0.0", + 
"es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.0.0", + "function-bind": "^1.1.2", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/get-package-type": { "version": "0.1.0", "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", @@ -3307,6 +3540,18 @@ "node": ">=4" } }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", @@ -3322,11 +3567,49 @@ "node": ">=8" } }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-property-descriptors/-/has-property-descriptors-1.0.2.tgz", + "integrity": "sha512-55JNKuIW+vq4Ke1BjOTjM2YctQIvCT7GFzHwmfZPGo5wnrgkid0YQtnAleFSqumZm4az3n2BS+erby5ipJdgrg==", + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": 
"https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/hasown": { "version": "2.0.2", "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", - "dev": true, "dependencies": { "function-bind": "^1.1.2" }, @@ -3349,6 +3632,12 @@ "node": ">=10.17.0" } }, + "node_modules/ieee754": { + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.1.13.tgz", + "integrity": "sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==", + "license": "BSD-3-Clause" + }, "node_modules/import-local": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/import-local/-/import-local-3.1.0.tgz", @@ -3391,8 +3680,23 @@ "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", - "dev": true + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/is-arguments": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.2.0.tgz", + "integrity": "sha512-7bVbi0huj/wrIAOzb8U1aszg9kdi3KN/CyU19CTI7tAoZYEZoL9yCDXpbXN+uPsuWnP02cyug1gleqq+TU+YCA==", + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } }, "node_modules/is-arrayish": { "version": 
"0.2.1", @@ -3400,6 +3704,18 @@ "integrity": "sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg==", "dev": true }, + "node_modules/is-callable": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz", + "integrity": "sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-core-module": { "version": "2.13.1", "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.13.1.tgz", @@ -3430,6 +3746,21 @@ "node": ">=6" } }, + "node_modules/is-generator-function": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.0.10.tgz", + "integrity": "sha512-jsEjy9l3yiXEQ+PsXdmBwEPcOxaXWLspKdplFUVI9vq1iZgIekeC0L167qeu86czQaxed3q/Uzuw0swL0irL8A==", + "license": "MIT", + "dependencies": { + "has-tostringtag": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/is-number": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", @@ -3451,6 +3782,27 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/is-typed-array": { + "version": "1.1.15", + "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.15.tgz", + "integrity": "sha512-p3EcsicXjit7SaskXHs1hA91QxgTw46Fv6EFKKGS5DRFLD8yKnohjF3hxoju94b/OcMZoQukzpPpBE9uLVKzgQ==", + "license": "MIT", + "dependencies": { + "which-typed-array": "^1.1.16" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", + "integrity": 
"sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==", + "license": "MIT" + }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -4112,6 +4464,15 @@ "url": "https://github.com/chalk/supports-color?sponsor=1" } }, + "node_modules/jmespath": { + "version": "0.16.0", + "resolved": "https://registry.npmjs.org/jmespath/-/jmespath-0.16.0.tgz", + "integrity": "sha512-9FzQjJ7MATs1tSpnco1K6ayiYE3figslrXA72G2HQ/n76RzvYlofyi5QM+iX4YRs/pu3yzxlVQSST23+dMDknw==", + "license": "Apache-2.0", + "engines": { + "node": ">= 0.6.0" + } + }, "node_modules/js-tokens": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", @@ -4254,6 +4615,15 @@ "tmpl": "1.0.5" } }, + "node_modules/math-intrinsics": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.0.0.tgz", + "integrity": "sha512-4MqMiKP90ybymYvsut0CH2g4XWbfLtmlCkXmtmdcDCxNB+mQcu1w/1+L/VD7vi/PSv7X2JYV7SCcR+jiPXnQtA==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, "node_modules/merge-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", @@ -4504,6 +4874,28 @@ "node": ">=8" } }, + "node_modules/possible-typed-array-names": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/possible-typed-array-names/-/possible-typed-array-names-1.0.0.tgz", + "integrity": "sha512-d7Uw+eZoloe0EHDIYoe+bQ5WXnGMOpmiZFTuMWCwpjzzkL2nTjcKiAk4hh8TjnGye2TwWOk3UXucZ+3rbmBa8Q==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/postgres": { + "version": "3.4.5", + "resolved": "https://registry.npmjs.org/postgres/-/postgres-3.4.5.tgz", + "integrity": "sha512-cDWgoah1Gez9rN3H4165peY9qfpEo+SA61oQv65O3cRUE1pOEoJWwddwcqKE8XZYjbblOJlYDlLV4h67HrEVDg==", + "license": "Unlicense", + "engines": { + "node": ">=12" + }, + "funding": { + "type": "individual", + 
"url": "https://github.com/sponsors/porsager" + } + }, "node_modules/pretty-format": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-29.7.0.tgz", @@ -4543,6 +4935,12 @@ "node": ">= 6" } }, + "node_modules/punycode": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", + "integrity": "sha512-RofWgt/7fL5wP1Y7fxE7/EmTLzQVnB0ycyibJ0OOHIlJqTNzglYFxVwETOcIoJqJmpDXJ9xImDv+Fq34F/d4Dw==", + "license": "MIT" + }, "node_modules/pure-rand": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-6.1.0.tgz", @@ -4559,6 +4957,15 @@ } ] }, + "node_modules/querystring": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", + "integrity": "sha512-X/xY82scca2tau62i9mDyU9K+I+djTMUsvwf7xnUX5GLvVzgJybOJf4Y6o9Zx3oJK/LSXg5tTZBjwzqVPaPO2g==", + "deprecated": "The querystring API is considered Legacy. new code should use the URLSearchParams API instead.", + "engines": { + "node": ">=0.4.x" + } + }, "node_modules/react-is": { "version": "18.3.1", "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", @@ -4621,6 +5028,12 @@ "node": ">=10" } }, + "node_modules/sax": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz", + "integrity": "sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==", + "license": "ISC" + }, "node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", @@ -4630,6 +5043,23 @@ "semver": "bin/semver.js" } }, + "node_modules/set-function-length": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz", + "integrity": "sha512-pgRc4hJ4/sNjWCSS9AmnS40x3bNMDTknHgL5UaMBTMyJnU90EgWh1Rz+MC9eFu4BuN/UwZjKQuY/1v3rM7HMfg==", + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": 
"^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, "node_modules/shebang-command": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", @@ -5013,6 +5443,38 @@ "browserslist": ">= 4.21.0" } }, + "node_modules/url": { + "version": "0.10.3", + "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", + "integrity": "sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==", + "license": "MIT", + "dependencies": { + "punycode": "1.3.2", + "querystring": "0.2.0" + } + }, + "node_modules/util": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/util/-/util-0.12.5.tgz", + "integrity": "sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA==", + "license": "MIT", + "dependencies": { + "inherits": "^2.0.3", + "is-arguments": "^1.0.4", + "is-generator-function": "^1.0.7", + "is-typed-array": "^1.1.3", + "which-typed-array": "^1.1.2" + } + }, + "node_modules/uuid": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.0.0.tgz", + "integrity": "sha512-jOXGuXZAWdsTH7eZLtyXMqUb9EcWMGZNbL9YcGBJl4MH4nrxHmZJhEHvyLFrkxo+28uLb/NYRcStH48fnD0Vzw==", + "license": "MIT", + "bin": { + "uuid": "dist/bin/uuid" + } + }, "node_modules/v8-compile-cache-lib": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz", @@ -5057,6 +5519,26 @@ "node": ">= 8" } }, + "node_modules/which-typed-array": { + "version": "1.1.18", + "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.18.tgz", + "integrity": "sha512-qEcY+KJYlWyLH9vNbsr6/5j59AXk5ni5aakf8ldzBvGde6Iz4sxZGkJyWSAueTG7QhOvNRYb1lDdFmL5Td0QKA==", + "license": "MIT", + "dependencies": { + "available-typed-arrays": "^1.0.7", + "call-bind": "^1.0.8", + 
"call-bound": "^1.0.3", + "for-each": "^0.3.3", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/wrap-ansi": { "version": "7.0.0", "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", @@ -5093,6 +5575,28 @@ "node": "^12.13.0 || ^14.15.0 || >=16.0.0" } }, + "node_modules/xml2js": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.6.2.tgz", + "integrity": "sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==", + "license": "MIT", + "dependencies": { + "sax": ">=0.6.0", + "xmlbuilder": "~11.0.0" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/xmlbuilder": { + "version": "11.0.1", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-11.0.1.tgz", + "integrity": "sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==", + "license": "MIT", + "engines": { + "node": ">=4.0" + } + }, "node_modules/y18n": { "version": "5.0.8", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz", diff --git a/cdk/package.json b/cdk/package.json index 4fd2915..7d8cb80 100644 --- a/cdk/package.json +++ b/cdk/package.json @@ -21,9 +21,12 @@ }, "dependencies": { "@aws-cdk/aws-amplify-alpha": "^2.146.0-alpha.0", + "@aws-cdk/aws-appsync-alpha": "^2.59.0-alpha.0", "@aws-cdk/custom-resources": "^1.204.0", "aws-cdk-lib": "^2.146.0", + "aws-sdk": "^2.1692.0", "constructs": "^10.0.0", + "postgres": "^3.4.5", "source-map-support": "^0.5.21", "yaml": "^2.4.5" } diff --git a/cdk/sqsTrigger/Dockerfile b/cdk/sqsTrigger/Dockerfile new file mode 100644 index 0000000..681a4bd --- /dev/null +++ b/cdk/sqsTrigger/Dockerfile @@ -0,0 +1,16 @@ +FROM public.ecr.aws/lambda/python:3.11 + +# Install system dependencies +RUN yum -y install postgresql-devel gcc libpq + +# Copy requirements.txt +COPY requirements.txt 
${LAMBDA_TASK_ROOT} + +# Install Python packages +RUN pip install --no-cache-dir -r requirements.txt + +# Copy the source code +COPY src/ ${LAMBDA_TASK_ROOT} + +# Set the CMD to your handler +CMD [ "main.handler" ] diff --git a/cdk/sqsTrigger/requirements.txt b/cdk/sqsTrigger/requirements.txt new file mode 100644 index 0000000..0c5f0d7 --- /dev/null +++ b/cdk/sqsTrigger/requirements.txt @@ -0,0 +1,8 @@ +boto3 +botocore +sqlalchemy +Pillow +pymupdf +psycopg[binary,pool] +psycopg2-binary +httpx \ No newline at end of file diff --git a/cdk/sqsTrigger/src/main.py b/cdk/sqsTrigger/src/main.py new file mode 100644 index 0000000..c6f6e81 --- /dev/null +++ b/cdk/sqsTrigger/src/main.py @@ -0,0 +1,278 @@ +import os +import json +import logging +import boto3 +import psycopg2 +import csv +import httpx +import time +from datetime import datetime +from botocore.exceptions import ClientError + +# Set up basic logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger() + +# Environment variables +DB_SECRET_NAME = os.environ["SM_DB_CREDENTIALS"] +REGION = os.environ["REGION"] +CHATLOGS_BUCKET = os.environ["CHATLOGS_BUCKET"] +RDS_PROXY_ENDPOINT = os.environ["RDS_PROXY_ENDPOINT"] +APPSYNC_API_URL = os.environ["APPSYNC_API_URL"] + +# AWS Clients +secrets_manager_client = boto3.client("secretsmanager") +s3_client = boto3.client("s3") + +# Cached resources +connection = None +db_secret = None + +def get_secret(): + global db_secret + if db_secret is None: + try: + response = secrets_manager_client.get_secret_value(SecretId=DB_SECRET_NAME)["SecretString"] + db_secret = json.loads(response) + except Exception as e: + logger.error(f"Error fetching DB secret: {e}") + raise + return db_secret + +def connect_to_db(): + global connection + if connection is None or connection.closed: + try: + secret = get_secret() + connection_params = { + 'dbname': secret["dbname"], + 'user': secret["username"], + 'password': secret["password"], + 'host': RDS_PROXY_ENDPOINT, + 'port': 
secret["port"] + } + connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) + connection = psycopg2.connect(connection_string) + logger.info("Connected to the database!") + print("Connected to the database!") + except Exception as e: + logger.error(f"Failed to connect to database: {e}") + if connection: + connection.rollback() + connection.close() + raise + return connection + + +def query_chat_logs(course_id): + """ + Queries the database to fetch chat logs for a given course_id. + """ + connection = connect_to_db() + if connection is None: + error_message = "Database connection is unavailable." + logger.error(error_message) + raise Exception(error_message) + + try: + cur = connection.cursor() + query = """ + SELECT + u.user_id, + cm.module_name, + cc.concept_name, + s.session_id, + m.message_content AS message, + m.student_sent AS sent_by_student, + CASE + WHEN sm.module_score = 100 THEN 'complete' + ELSE 'incomplete' + END AS competency_status, + m.time_sent AS timestamp + FROM + "Messages" m + JOIN + "Sessions" s ON m.session_id = s.session_id + JOIN + "Student_Modules" sm ON s.student_module_id = sm.student_module_id + JOIN + "Course_Modules" cm ON sm.course_module_id = cm.module_id + JOIN + "Course_Concepts" cc ON cm.concept_id = cc.concept_id + JOIN + "Enrolments" e ON sm.enrolment_id = e.enrolment_id + JOIN + "Users" u ON e.user_id = u.user_id + WHERE + cc.course_id = %s + ORDER BY + u.user_id, cm.module_name, s.session_id, m.time_sent; + """ + cur.execute(query, (course_id,)) + results = cur.fetchall() + logger.info(f"Fetched {len(results)} chat log records for course_id: {course_id}.") + print(f"Fetched {len(results)} chat log records for course_id: {course_id}.") + cur.close() + return results + except Exception as e: + if cur: + cur.close() + connection.rollback() + logger.error(f"Error querying chat logs for course_id {course_id}: {e}") + raise + + +def write_to_csv(data, course_id, instructor_email): + """ + 
Writes the queried data to a CSV file. + """ + timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + file_name = f"{timestamp}.csv" + file_dir = f"/tmp/{course_id}/{instructor_email}" + file_path = f"{file_dir}/{file_name}" + + try: + # Ensure the directory exists (including nested directories) + os.makedirs(file_dir, exist_ok=True) + + # Write the data to the CSV file + with open(file_path, mode="w", newline="") as file: + writer = csv.writer(file) + writer.writerow([ + "user_id", "module_name", "concept_name", "session_id", + "message", "sent_by_student", "competency_status", "timestamp" + ]) + writer.writerows(data) + + logger.info(f"CSV file created successfully: {file_path}") + print(f"CSV file created successfully: {file_path}") + return file_path, file_name + except Exception as e: + logger.error(f"Error writing to CSV file {file_name}: {e}") + raise + + +def upload_to_s3(file_path, course_id, instructor_email, file_name): + """ + Uploads the file to S3 with the specified path. + """ + # Construct the S3 key (path in the bucket) + s3_key = f"{course_id}/{instructor_email}/{file_name}" + + try: + s3_client.upload_file(file_path, CHATLOGS_BUCKET, s3_key) + logger.info(f"File uploaded successfully to S3: s3://{CHATLOGS_BUCKET}/{s3_key}") + print(f"File uploaded successfully to S3: s3://{CHATLOGS_BUCKET}/{s3_key}") + return f"s3://{CHATLOGS_BUCKET}/{s3_key}" + except Exception as e: + logger.error(f"Error uploading file to S3: {e}") + raise + +def update_completion_status(course_id, instructor_email, request_id): + """ + Updates the completion status to True in the chatlogs_notifications table. + """ + connection = connect_to_db() + if connection is None: + error_message = "Database connection is unavailable." 
+ logger.error(error_message) + raise Exception(error_message) + + try: + cur = connection.cursor() + update_query = """ + UPDATE chatlogs_notifications + SET completion = TRUE + WHERE course_id = %s AND instructor_email = %s AND request_id = %s; + """ + cur.execute(update_query, (course_id, instructor_email, request_id)) + connection.commit() + cur.close() + logger.info(f"Completion status updated for course_id: {course_id}, instructor_email: {instructor_email}, request_id: {request_id}.") + print(f"Completion status updated for course_id: {course_id}, instructor_email: {instructor_email}, request_id: {request_id}.") + except Exception as e: + if cur: + cur.close() + connection.rollback() + logger.error(f"Error updating completion status for course_id {course_id}, instructor_email {instructor_email}, request_id {request_id}: {e}") + raise + + +def invoke_event_notification(request_id, message="Chat logs successfully uploaded"): + try: + query = """ + mutation sendNotification($message: String!, $request_id: String!) 
{ + sendNotification(message: $message, request_id: $request_id) { + message + request_id + } + } + """ + headers = {"Content-Type": "application/json", "Authorization": "API_KEY"} + payload = { + "query": query, + "variables": { + "message": message, + "request_id": request_id, + } + } + + # Delay to ensure client subscribes before mutation is sent + time.sleep(1) + + # Send the request to AppSync + with httpx.Client() as client: + response = client.post(APPSYNC_API_URL, headers=headers, json=payload) + response_data = response.json() + + logger.info(f"RESPONSE: {response}") + print(f"RESPONSE: {response}") + + if response.status_code != 200 or "errors" in response_data: + logger.error(f"Failed to send notification to AppSync: {response_data}") + raise Exception(f"Failed to send notification: {response_data}") + + logger.info(f"Notification sent successfully: {response_data}") + print(f"Notification sent successfully: {response_data}") + except Exception as e: + logger.error(f"Error invoking AppSync notification: {e}") + raise + + +def handler(event, context): + try: + if "Records" not in event: + logger.error("Invalid event format: missing 'Records'.") + raise ValueError("Event does not contain 'Records'.") + + for record in event["Records"]: + try: + message_body = json.loads(record["body"]) + course_id = message_body.get("course_id") + instructor_email = message_body.get("instructor_email") + request_id = message_body.get("request_id") + + if not course_id or not instructor_email or not request_id: + logger.error("Missing required parameters: course_id or instructor_email or request_id.") + continue + + chat_logs = query_chat_logs(course_id) + print("GOT chat_logs") + csv_path, csv_name = write_to_csv(chat_logs, course_id, instructor_email) + print("GOT got csv_path and csv_name") + s3_uri = upload_to_s3(csv_path, course_id, instructor_email, csv_name) + print("GOT s3_uri") + update_completion_status(course_id, instructor_email, request_id) + print("Updating 
completion status") + invoke_event_notification(request_id, message=f"Chat logs uploaded to {s3_uri}") + print("FINALLY SENT NOTIFICATION") + + except Exception as e: + logger.error(f"Error processing SQS message: {e}") + continue + + return {"statusCode": 200, "body": json.dumps({"message": "Processing completed successfully."})} + + except Exception as e: + logger.error(f"Unhandled error in sqsTrigger handler: {e}") + return {"statusCode": 500, "body": json.dumps({"error": str(e)})} \ No newline at end of file diff --git a/cdk/text_generation/src/main.py b/cdk/text_generation/src/main.py index 97a9cae..1b8102d 100644 --- a/cdk/text_generation/src/main.py +++ b/cdk/text_generation/src/main.py @@ -13,81 +13,105 @@ logging.basicConfig(level=logging.INFO) logger = logging.getLogger() - +# Environment variables DB_SECRET_NAME = os.environ["SM_DB_CREDENTIALS"] REGION = os.environ["REGION"] RDS_PROXY_ENDPOINT = os.environ["RDS_PROXY_ENDPOINT"] +BEDROCK_LLM_PARAM = os.environ["BEDROCK_LLM_PARAM"] +EMBEDDING_MODEL_PARAM = os.environ["EMBEDDING_MODEL_PARAM"] +TABLE_NAME_PARAM = os.environ["TABLE_NAME_PARAM"] -def get_secret(secret_name, expect_json=True): - try: - # secretsmanager client to get db credentials - sm_client = boto3.client("secretsmanager") - response = sm_client.get_secret_value(SecretId=secret_name)["SecretString"] - - if expect_json: - return json.loads(response) - else: - print(response) - return response +# AWS Clients +secrets_manager_client = boto3.client("secretsmanager") +ssm_client = boto3.client("ssm", region_name=REGION) +bedrock_runtime = boto3.client("bedrock-runtime", region_name=REGION) - except json.JSONDecodeError as e: - logger.error(f"Failed to decode JSON for secret {secret_name}: {e}") - raise ValueError(f"Secret {secret_name} is not properly formatted as JSON.") - except Exception as e: - logger.error(f"Error fetching secret {secret_name}: {e}") - raise +# Cached resources +connection = None +db_secret = None +BEDROCK_LLM_ID = None 
+EMBEDDING_MODEL_ID = None +TABLE_NAME = None -def get_parameter(param_name): +# Cached embeddings instance +embeddings = None + +def get_secret(secret_name, expect_json=True): + global db_secret + if db_secret is None: + try: + response = secrets_manager_client.get_secret_value(SecretId=secret_name)["SecretString"] + db_secret = json.loads(response) if expect_json else response + except json.JSONDecodeError as e: + logger.error(f"Failed to decode JSON for secret: {e}") + raise ValueError(f"Secret is not properly formatted as JSON.") + except Exception as e: + logger.error(f"Error fetching secret: {e}") + raise + return db_secret + +def get_parameter(param_name, cached_var): """ Fetch a parameter value from Systems Manager Parameter Store. """ - try: - ssm_client = boto3.client("ssm", region_name=REGION) - response = ssm_client.get_parameter(Name=param_name, WithDecryption=True) - return response["Parameter"]["Value"] - except Exception as e: - logger.error(f"Error fetching parameter {param_name}: {e}") - raise - -## GET PARAMETER VALUES FOR CONSTANTS -BEDROCK_LLM_ID = get_parameter(os.environ["BEDROCK_LLM_PARAM"]) -EMBEDDING_MODEL_ID = get_parameter(os.environ["EMBEDDING_MODEL_PARAM"]) -TABLE_NAME = get_parameter(os.environ["TABLE_NAME_PARAM"]) - -## GETTING AMAZON TITAN EMBEDDINGS MODEL -bedrock_runtime = boto3.client( - service_name="bedrock-runtime", - region_name=REGION, - ) - -embeddings = BedrockEmbeddings( - model_id=EMBEDDING_MODEL_ID, - client=bedrock_runtime, - region_name=REGION -) - -create_dynamodb_history_table(TABLE_NAME) + if cached_var is None: + try: + response = ssm_client.get_parameter(Name=param_name, WithDecryption=True) + cached_var = response["Parameter"]["Value"] + except Exception as e: + logger.error(f"Error fetching parameter {param_name}: {e}") + raise + return cached_var + +def initialize_constants(): + global BEDROCK_LLM_ID, EMBEDDING_MODEL_ID, TABLE_NAME, embeddings + BEDROCK_LLM_ID = get_parameter(BEDROCK_LLM_PARAM, BEDROCK_LLM_ID) 
+ EMBEDDING_MODEL_ID = get_parameter(EMBEDDING_MODEL_PARAM, EMBEDDING_MODEL_ID) + TABLE_NAME = get_parameter(TABLE_NAME_PARAM, TABLE_NAME) + + if embeddings is None: + embeddings = BedrockEmbeddings( + model_id=EMBEDDING_MODEL_ID, + client=bedrock_runtime, + region_name=REGION, + ) + + create_dynamodb_history_table(TABLE_NAME) + +def connect_to_db(): + global connection + if connection is None or connection.closed: + try: + secret = get_secret(DB_SECRET_NAME) + connection_params = { + 'dbname': secret["dbname"], + 'user': secret["username"], + 'password': secret["password"], + 'host': RDS_PROXY_ENDPOINT, + 'port': secret["port"] + } + connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) + connection = psycopg2.connect(connection_string) + logger.info("Connected to the database!") + except Exception as e: + logger.error(f"Failed to connect to database: {e}") + if connection: + connection.rollback() + connection.close() + raise + return connection def get_module_name(module_id): - connection = None - cur = None - try: - logger.info(f"Fetching module name for module_id: {module_id}") - db_secret = get_secret(DB_SECRET_NAME) - - connection_params = { - 'dbname': db_secret["dbname"], - 'user': db_secret["username"], - 'password': db_secret["password"], - 'host': RDS_PROXY_ENDPOINT, - 'port': db_secret["port"] + connection = connect_to_db() + if connection is None: + logger.error("No database connection available.") + return { + "statusCode": 500, + "body": json.dumps("Database connection failed.") } - - connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) - - connection = psycopg2.connect(connection_string) + + try: cur = connection.cursor() - logger.info("Connected to RDS instance!") cur.execute(""" SELECT module_name @@ -100,7 +124,6 @@ def get_module_name(module_id): module_name = result[0] if result else None cur.close() - connection.close() if module_name: logger.info(f"Module name for 
module_id {module_id} found: {module_name}") @@ -111,34 +134,21 @@ def get_module_name(module_id): except Exception as e: logger.error(f"Error fetching module name: {e}") - if connection: - connection.rollback() - return None - finally: if cur: cur.close() - if connection: - connection.close() - logger.info("Connection closed.") + connection.rollback() + return None def get_system_prompt(course_id): - connection = None - cur = None - try: - logger.info(f"Fetching system prompt for course_id: {course_id}") - db_secret = get_secret(DB_SECRET_NAME) - - connection_params = { - 'dbname': db_secret["dbname"], - 'user': db_secret["username"], - 'password': db_secret["password"], - 'host': RDS_PROXY_ENDPOINT, - 'port': db_secret["port"] + connection = connect_to_db() + if connection is None: + logger.error("No database connection available.") + return { + "statusCode": 500, + "body": json.dumps("Database connection failed.") } - - connection_string = " ".join([f"{key}={value}" for key, value in connection_params.items()]) - - connection = psycopg2.connect(connection_string) + + try: cur = connection.cursor() logger.info("Connected to RDS instance!") @@ -153,7 +163,6 @@ def get_system_prompt(course_id): system_prompt = result[0] if result else None cur.close() - connection.close() if system_prompt: logger.info(f"System prompt for course_id {course_id} found: {system_prompt}") @@ -164,18 +173,14 @@ def get_system_prompt(course_id): except Exception as e: logger.error(f"Error fetching system prompt: {e}") - if connection: - connection.rollback() - return None - finally: if cur: cur.close() - if connection: - connection.close() - logger.info("Connection closed.") + connection.rollback() + return None def handler(event, context): logger.info("Text Generation Lambda function is called!") + initialize_constants() query_params = event.get("queryStringParameters", {}) diff --git a/docs/architectureDeepDive.md b/docs/architectureDeepDive.md index 1d6995a..2ca43f4 100644 --- 
a/docs/architectureDeepDive.md +++ b/docs/architectureDeepDive.md @@ -17,6 +17,11 @@ 9. Users can start chatting with the LLM by sending an API request that invokes the Lambda function to generate a response. The Lambda function runs a Docker container with Amazon ECR. 10. The lambda function stores the embedded messages in Amazon DynamoDB 11. This lambda function uses RAG architecture to retrieve the response from LLMs hosted on Amazon Bedrock augmented with the course's information stored in the Amazon RDS. +12. When an instructor clicks download chat logs, it is queued in Amazon SQS. +13. An AWS Lambda function is triggered by the SQS queue to process the chat messages asynchronously. +14. The processed chat messages are then stored in the Amazon RDS database for structured storage and retrieval. +15. Additionally, chat logs are stored in Amazon S3. +16. The Lambda function also interacts with AWS AppSync (GraphQL) to update the frontend chat interface in real-time, with notifications for when the CSV is finished downloading. ## Database Schema diff --git a/docs/deploymentGuide.md b/docs/deploymentGuide.md index 0362fb1..0c5de80 100644 --- a/docs/deploymentGuide.md +++ b/docs/deploymentGuide.md @@ -74,32 +74,125 @@ cd AI-Learning-Assistant ``` ### Step 2: Upload Secrets -You would have to supply your GitHub personal access token you created earlier when deploying the solution. Run the following command and ensure you replace `` and `` with your actual GitHub token and the appropriate AWS profile name. -``` +You would have to supply your GitHub personal access token you created earlier when deploying the solution. Run the following command and ensure you replace `` and `` with your actual GitHub token and the appropriate AWS profile name. Select the command corresponding to your operating system from the options below. + +
+macOS + +```bash aws secretsmanager create-secret \ --name github-personal-access-token \ - --secret-string '{\"my-github-token\":\"\"}'\ + --secret-string '{"my-github-token": ""}' \ --profile ``` -Moreover, you will need to upload your github username to Amazon SSM Parameter Store. You can do so by running the following command. Make sure you replace `` and `` with your actual username and the appropriate AWS profile name. +
+ +
+Windows CMD + +```cmd +aws secretsmanager create-secret ^ + --name github-personal-access-token ^ + --secret-string "{\"my-github-token\": \"\"}" ^ + --profile +``` + +
+
+PowerShell + +```powershell +aws secretsmanager create-secret ` + --name github-personal-access-token ` + --secret-string '{"my-github-token": ""}' ` + --profile ``` +
+ +  + +Moreover, you will need to upload your github username to Amazon SSM Parameter Store. You can do so by running the following command. Make sure you replace `` and `` with your actual username and the appropriate AWS profile name. + + +
+macOS + +```bash aws ssm put-parameter \ --name "aila-owner-name" \ --value "" \ --type String \ --profile ``` +
-You would have to supply a custom database username when deploying the solution to increase security. Run the following command and ensure you replace `` with the custom name of your choice. +
+Windows CMD +```cmd +aws ssm put-parameter ^ + --name "aila-owner-name" ^ + --value "" ^ + --type String ^ + --profile ``` + +
+ +
+PowerShell + +```powershell +aws ssm put-parameter ` + --name "aila-owner-name" ` + --value "" ` + --type String ` + --profile +``` +
+ +  + +You would have to supply a custom database username when deploying the solution to increase security. Run the following command and ensure you replace `` with the custom name of your choice. + + +
+macOS + +```bash aws secretsmanager create-secret \ - --name AILASecrets \ - --secret-string '{\"DB_Username\":\"\"}'\ + --name AILASecret \ + --secret-string "{\"DB_Username\":\"\"}"\ + --profile +``` +
+ +
+Windows CMD + +```cmd +aws secretsmanager create-secret ^ + --name AILASecret ^ + --secret-string "{\"DB_Username\":\"\"}"^ + --profile +``` + +
+ +
+PowerShell + +```powershell +aws secretsmanager create-secret ` + --name AILASecret ` + --secret-string "{\"DB_Username\":\"\"}"` --profile ``` +
+ +  For example, @@ -112,13 +205,44 @@ aws secretsmanager create-secret \ Finally, in order to restrict user sign up to specific email domains, you will need to upload a comma separated list of allowed email domains to Amazon SSM Parameter Store. You can do so by running the following command. Make sure you replace `` and `` with your actual list and the appropriate AWS profile name. -``` +
+macOS + +```bash aws ssm put-parameter \ --name "/AILA/AllowedEmailDomains" \ --value "" \ --type SecureString \ --profile ``` +
+ +
+Windows CMD + +```cmd +aws ssm put-parameter ^ + --name "/AILA/AllowedEmailDomains" ^ + --value "" ^ + --type SecureString ^ + --profile +``` + +
+ +
+PowerShell + +```powershell +aws ssm put-parameter ` + --name "/AILA/AllowedEmailDomains" ` + --value "" ` + --type SecureString ` + --profile +``` +
+ +  For example, @@ -180,14 +304,14 @@ cdk bootstrap aws:/// --profile -Amplify:githubRepoName=Digital-Strategy-Assistant \ + --parameters -AmplifyStack:githubRepoName=AI-Learning-Assistant \ --context StackPrefix= \ --profile ``` For example: ``` -cdk deploy --all --parameters AILA-Amplify:githubRepoName=Digital-Strategy-Assistant --context StackPrefix=AILA --profile +cdk deploy --all --parameters AILA-AmplifyStack:githubRepoName=AI-Learning-Assistant --context StackPrefix=AILA --profile ``` If you have trouble running the commands, try removing all the \ and run it in one line. diff --git a/docs/images/NetworkDiagram.png b/docs/images/NetworkDiagram.png new file mode 100644 index 0000000..f313fcf Binary files /dev/null and b/docs/images/NetworkDiagram.png differ diff --git a/docs/images/SecurityConfiguration.png b/docs/images/SecurityConfiguration.png new file mode 100644 index 0000000..9f69b5b Binary files /dev/null and b/docs/images/SecurityConfiguration.png differ diff --git a/docs/images/SharedResponsibilityModel.png b/docs/images/SharedResponsibilityModel.png new file mode 100644 index 0000000..09bc59f Binary files /dev/null and b/docs/images/SharedResponsibilityModel.png differ diff --git a/docs/images/architecture.drawio.xml b/docs/images/architecture.drawio.xml new file mode 100644 index 0000000..346f32e --- /dev/null +++ b/docs/images/architecture.drawio.xml @@ -0,0 +1,276 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/docs/images/architecture.png b/docs/images/architecture.png index be892cd..9f2afae 100644 Binary files a/docs/images/architecture.png and b/docs/images/architecture.png differ diff --git a/docs/images/instructor-analytics.png b/docs/images/instructor-analytics.png index 371da26..81eeed3 100644 Binary files a/docs/images/instructor-analytics.png and b/docs/images/instructor-analytics.png differ diff --git a/docs/images/instructor-chat-history.png b/docs/images/instructor-chat-history.png new file mode 100644 index 0000000..0c9381c Binary files /dev/null and b/docs/images/instructor-chat-history.png differ diff --git a/docs/images/instructor-edit-concept.png b/docs/images/instructor-edit-concept.png index 4526504..8a5275f 100644 Binary files a/docs/images/instructor-edit-concept.png and b/docs/images/instructor-edit-concept.png differ diff --git a/docs/images/instructor-edit-modules.png b/docs/images/instructor-edit-modules.png index ed19ff9..c64efcb 100644 Binary files a/docs/images/instructor-edit-modules.png and b/docs/images/instructor-edit-modules.png differ diff --git a/docs/images/instructor-prompt-settings.png b/docs/images/instructor-prompt-settings.png index e54a8c0..9d6a2d9 100644 Binary files a/docs/images/instructor-prompt-settings.png and b/docs/images/instructor-prompt-settings.png differ diff --git a/docs/images/instructor-view-students.png b/docs/images/instructor-view-students.png index aae450f..f7c8a8b 100644 Binary files a/docs/images/instructor-view-students.png and b/docs/images/instructor-view-students.png differ diff --git a/docs/images/network.drawio.xml b/docs/images/network.drawio.xml new file mode 100644 index 0000000..ded70e5 --- /dev/null +++ b/docs/images/network.drawio.xml @@ -0,0 +1,182 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/docs/images/s3-workflow.png b/docs/images/s3-workflow.png new file mode 100644 index 0000000..7168dfd Binary files /dev/null and b/docs/images/s3-workflow.png differ diff --git a/docs/securityGuide.md b/docs/securityGuide.md new file mode 100644 index 0000000..9427f54 --- /dev/null +++ b/docs/securityGuide.md @@ -0,0 +1,514 @@ +# Security Documentation & Network Architecture + +## Shared Responsibility Model + +![Shared Responsibility Model](images/SharedResponsibilityModel.png) + +#### The AWS Shared Responsibility Model defines the division of security responsibilities between CIC and its sponsors. At CIC, we are responsible for securing the cloud, while customers are responsible for securing their applications and data within the cloud + + +### CIC Responsibilities (Security of the Cloud): +- Infrastructure Security +- Network Protection +- Compliance with Industry Security Standards +- Service-Level Security + + +### Customer Responsibilities (Security in the Cloud): +- Data Protection +- Identity & Access Management +- Application Security +- Network Security Configuration: + +[Learn more](https://aws.amazon.com/compliance/shared-responsibility-model/) + + +This document outlines the existing network and security configurations implemented for this project. Additionally, it provides recommendations and guidance on leveraging AWS services and features to enhance security, monitor application performance, and maintain compliance + + +## 1. 
Network Architecture + +![Network Architecture Diagram](images/NetworkDiagram.png) + + +### 1.1 VPC & Subnets +VPC Configuration: +- Leveraged existing VPC in AWS Account since organizational policies prevent new VPC creation +- CIDR Range is inherited from existing VPC configuration + +#### Subnet Configuration: + +| Subnet Type | AZ | Key Services | +|-------------|----------------|------------------------------------| +| Private | ca-central-1a | Lambda | +| Private | ca-central-1b | RDS Proxy, Amazon RDS | +| Private | ca-central-1c | Backup RDS | +| Public | ca-central-1 | NAT Gateway, Internet Gateway | + + +#### Services Deployment: + +#### Private Subnets: +- **AWS Lambda:** + - Runtime environment for application logic + - No public IP addresses + - Outbound internet via NAT Gateway + +- **Amazon RDS (PostgreSQL):** + - Accessed exclusively via RDS Proxy + - No direct public access + - Encrypted connections via SSL/TLS + + Since VPC Endpoints are not used, Lambda accesses S3, ECR, and other AWS services over the public internet through the NAT Gateway. 
+ + +#### Public Subnets: +- **NAT Gateway:** [Learn more](https://docs.aws.amazon.com/vpc/latest/userguide/vpc-nat-gateway.html) + + - Required for private subnet services to fetch external packages/updates + - Egress-only internet access for Lambda + - Cost-optimized single AZ deployment + +- **Internet Gateway:** [Learn more](https://docs.aws.amazon.com/vpc/latest/userguide/VPC_Internet_Gateway.html) + - Enables public access to API Gateway + +#### Services outside of VPC: +- **S3 Buckets:** [Learn more](https://aws.amazon.com/pm/serv-s3/?gclid=CjwKCAiAlPu9BhAjEiwA5NDSA1VjMbPPYbzEKHPHFwna4OblKvQe5sm9sigb9iHW69Zc_pxuRifGzxoCUiEQAvD_BwE&trk=936e5692-d2c9-4e52-a837-088366a7ac3f&sc_channel=ps&ef_id=CjwKCAiAlPu9BhAjEiwA5NDSA1VjMbPPYbzEKHPHFwna4OblKvQe5sm9sigb9iHW69Zc_pxuRifGzxoCUiEQAvD_BwE:G:s&s_kwcid=AL!4422!3!536324434071!e!!g!!s3!11346198420!112250793838) + - Accessed via NAT Gateway through Lambda functions + - No internet routing through NAT Gateway + + #### How objects in S3 are accessed: + + ![S3 Workflow Diagram](images/s3-workflow.png) + + The above diagram illustrates the use of S3 pre-signed URLs in our architecture. The process works as follows: + + 1. Client Request: The client first requests a pre-signed URL by making an API call to the Amazon API Gateway + + 2. Pre-Signed URL Generation: The API Gateway invokes an AWS Lambda function, which is responsible for generating the pre-signed URL. The Lambda function checks for the appropriate permissions (PutObject action) for the requested S3 bucket + + 3. Permission Validation: If permissions are validated, the Lambda function returns the generated pre-signed URL to the client + + 4. File Upload: The client uses this pre-signed URL to upload files directly to S3, bypassing the need for the server to handle large file uploads. 
This approach ensures: + + - Secure, time-limited access to the S3 bucket without exposing long-term credentials + + - Offloading file transfer workload from backend servers, reducing latency and cost + + + Learn More: + - [Sharing objects with presigned URLs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/ShareObjectPreSignedURL.html) + + - [Download and upload objects with presigned URLs](https://docs.aws.amazon.com/AmazonS3/latest/userguide/using-presigned-url.html) + + + Additional security measures: + - All data is encrypted at rest using SSE-S3 (AES-256) + - Public access is blocked for all S3 buckets + - SSL connections are enforced for secure data transfer + +- **Amazon API Gateway:** + - Deployed in AWS public cloud space + - Protected by regional security controls + - Custom Lambda Authorizers validate user permissions before accessing endpoints + - Uses Cognito User Pools for authentication and role-based access control + - IAM policies restrict API Gateway access based on user roles + +- **Amazon Bedrock:** + - Requires explicit model access requests for utilization + - API interactions secured using IAM roles and encrypted connections + +- **AWS AppSync:** + - Provides real-time data queries and synchronizes data between clients and backend + - Integrated with IAM for authentication and runs in the public cloud space + - SQS queues with server-side encryption (SSE) enabled using AWS-managed keys + - Only specific Lambda functions are granted permissions to send or receive messages + +- **Amazon Cognito:** + - Provides authentication and authorization for Lambda access + - Role-based access control via IAM roles and policies + - Triggers (Pre-Sign-Up, Post-Confirmation, Post-Authentication) manage user provisioning. 
+ - Secures real-time data sync via AppSync with Lambda authorizers + +- **Amazon SQS:** + - Facilitates real-time data synchronization and GraphQL APIs + - Integrated with Cognito for secure, authenticated access + - Employs server-side encryption using AWS-managed keys + - Connects to Lambda functions for data processing and custom business logic + - Configured with IAM roles to enforce least-privilege access control + - Supports secure WebSocket connections for live data updates + +- **Amazon ECR:** + - Lambda functions utilize Docker images stored in Amazon ECR + - Images are securely pulled over the internet via the NAT Gateway + + + +## 1.2 Security Configuration + +![Security Configuration](images/SecurityConfiguration.png) + + +This diagram illustrates how our architecture handles key security aspects by leveraging AWS services tailored for each security domain. Identity Services, including AWS IAM and Amazon Cognito, ensure secure authentication and access control. Data Protection is enforced through AWS Secrets Manager and AWS KMS for secure storage and encryption. Infrastructure Protection relies on AWS WAF, AWS Shield, and AWS Systems Manager to safeguard against threats. Detection Services such as Security Hub, Amazon GuardDuty, and Amazon CloudWatch provide continuous monitoring and threat detection + + + + +## 2. Security Controls + +### 2.1 Network Security + +**Security Groups:** + +| Name | Rules | +|---------------|-------------------------------------------------| +| Lambda-SG | Allow outbound: 5432 (RDS Proxy) | +| RDS-Proxy-SG | Allow inbound: 5432 from Lambda-SG | +| Default-SG | Block all inbound, allow outbound | + +**NACLs:** +- Default NACLs in use +- No custom rules - inherits Control Tower baseline: + - Inbound: ALLOW ALL + - Outbound: ALLOW ALL + + + +## 3. 
Data Store Security + +### 3.1 Encryption + +**Purpose:** Ensure all stored data is encrypted at rest to meet compliance and security standards + + +### 3.2 Access Controls + +#### RDS Proxy: +- IAM authentication required +- Connection pooling limits credential exposure +- Audit logs enabled via CloudWatch + + +## 4. Secrets & Parameters + +### 4.1 Credential Management + +**Purpose:** Securely manage sensitive credentials such as RDS passwords + +#### AWS Secrets Manager: +- Creates a new secret named DBSecret for RDS credentials +- Enhances security by regularly updating credentials without manual intervention + + + +## 5. Security Services + +### 5.1 AWS WAF & Shield + +**WAF Rules Applied:** [Learn more](https://docs.aws.amazon.com/AmazonCloudFront/latest/DeveloperGuide/distribution-web-awswaf.html) +- SQLi Protection (AWSManagedRulesSQLiRuleSet) +- XSS Protection (AWSManagedRulesXSSRuleSet) +- Request Threshold: 100 requests/min per IP + +**Shield Standard:** [Learn more](https://docs.aws.amazon.com/waf/latest/developerguide/ddos-overview.html) +- Enabled on API Gateway +- CloudWatch alarms for DDoS detection + + +### 5.2 Security Hub + +**Purpose:** Enable continuous security monitoring and automate compliance checks [Learn more](https://docs.aws.amazon.com/securityhub/latest/userguide/what-is-securityhub.html) + +#### Account-level monitoring recommendations: + +- Enable Security Hub in the AWS Management Console for the target region (e.g., ca-central-1) +- Integrate Security Hub with AWS services (e.g., GuardDuty) for comprehensive security analysis +- Use Security Hub Insights to identify and prioritize security issues across AWS accounts + +#### How to Use: + +- Navigate to Security Hub in the AWS console +- Review findings generated from AWS best practices and integrated security services +- Apply security standards like AWS Foundational Security Best Practices +- Use custom insights and filters (e.g., resources.tags.Project = "AILA") to focus on 
relevant resources +- Remediate issues based on the severity and compliance requirements + + +## 6. RDS Security + +### 6.1 RDS Encryption + +**Purpose:** Secure RDS data at rest using AWS KMS encryption and prevent accidental deletion + +- Enabled storage encryption by setting storageEncrypted to true +- Referenced an existing KMS key using kms.Key.fromKeyArn() for encryption +- deletionProtection is set to true to prevent unintended RDS deletions. + +### 6.2 RDS Security Groups + +**Purpose:** Control database access by allowing PostgreSQL traffic (5432) only from trusted CIDRs + +CDK Implementation: +```typescript +vpcStack.privateSubnetsCidrStrings.forEach((cidr) => { + dbSecurityGroup.addIngressRule( + ec2.Peer.ipv4(cidr), + ec2.Port.tcp(5432), + `Allow PostgreSQL traffic from ${cidr}` + ); +}); + +``` + +### 6.3 RDS Proxy + +**Purpose:** Enhance RDS access performance, security, and scalability by utilizing Amazon RDS Proxy + + - IAM Authentication: RDS Proxy requires IAM authentication for secure access + - Connection Pooling: Efficiently manages and reuses database connections, reducing the load on RDS + - TLS Enforcement: Secure connections with optional TLS enforcement for data-in-transit encryption + - Role Management: IAM roles grant rds-db:connect permissions to trusted Lambda functions + - Fault Tolerance: Proxies automatically handle database failovers, improving application availability + - Security Groups: Configured to allow only trusted Lambda functions and services within private subnets to connect + + +## 7.
S3 Security + +### Bucket Security Configurations + +**Purpose:** Ensure data confidentiality by encrypting S3 objects and blocking public access [Learn more](https://aws.amazon.com/s3/security/) + +- Enabled S3-managed encryption (S3_MANAGED) for data at rest +- Blocked all public access with blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL +- Enforced SSL connections for secure data transfer by setting enforceSSL: true + +CDK Implementation: +```typescript +blockPublicAccess: s3.BlockPublicAccess.BLOCK_ALL, +enforceSSL: true, // Enforce secure data transfer +encryption: s3.BucketEncryption.S3_MANAGED, +``` + +## 8. Security Group Configurations + +**Purpose:** Secure network access between AWS components, ensuring least-privilege access + +### 8.1 Key Security Group Controls in CDK: + +| **Component** | **CDK Location** | **Key Security Control** | **Purpose** | +|----------------|------------------------|---------------------------------------------------------------|--------------------------------------------------| +| **RDS** | `DatabaseStack` | PostgreSQL (5432) only from private/VPC CIDRs | Restricts DB access to internal networks | +| **Lambda** | `ApiGatewayStack` | IAM policies for Secrets, ENI management, and SQS access | Limits Lambda access to necessary resources | +| **SQS** | `ApiGatewayStack` | Lambda-only `SendMessage` access via `addToResourcePolicy` | Secures SQS queue from unauthorized access | +| **AppSync** | `ApiGatewayStack` | Lambda authorizer & `appsync:GraphQL` permissions | Ensures secure, authenticated access to GraphQL APIs | +| **RDS Proxy** | `DatabaseStack` | IAM-based `rds-db:connect` permissions | Adds an extra layer of security between Lambda and RDS | + + +### 8.2 Examples from CDK infrastructure where these security measures are implemented: + +#### Lambda Access to Secrets Manager: +```javascript +lambdaRole.addToPolicy( + new iam.PolicyStatement({ + actions: ["secretsmanager:GetSecretValue"], + resources: 
[`arn:aws:secretsmanager:${this.region}:${this.account}:secret:*`], + }) +); +``` + +#### SQS Queue Security (Lambda-Only Access): +```typescript +messagesQueue.addToResourcePolicy( + new iam.PolicyStatement({ + actions: ["sqs:SendMessage"], + principals: [new iam.ServicePrincipal("lambda.amazonaws.com")], + resources: [messagesQueue.queueArn], + }) +); +``` + +#### Private Subnet Access: Allows PostgreSQL traffic (port 5432) only from private subnet CIDRs: +```typescript +vpcStack.privateSubnetsCidrStrings.forEach((cidr) => { + dbSecurityGroup.addIngressRule( + ec2.Peer.ipv4(cidr), + ec2.Port.tcp(5432), + `Allow PostgreSQL traffic from private subnet CIDR range ${cidr}` + ); +}); +``` + + +#### Database Ingress from the VPC: Allows PostgreSQL traffic (port 5432) into the RDS security groups from the VPC CIDR range, enabling VPC-attached Lambda functions to reach the database +```typescript +this.dbInstance.connections.securityGroups.forEach((securityGroup) => { + securityGroup.addIngressRule( + ec2.Peer.ipv4(vpcStack.vpcCidrString), + ec2.Port.tcp(5432), + "Allow PostgreSQL traffic from public subnets" + ); +}); +``` + +#### Lambda Access to Secrets Manager: +```typescript +lambdaRole.addToPolicy( + new iam.PolicyStatement({ + effect: iam.Effect.ALLOW, + actions: ["secretsmanager:GetSecretValue"], + resources: [`arn:aws:secretsmanager:${this.region}:${this.account}:secret:*`], + }) +); +``` + + +#### SQS Queue Security (Lambda-Only Access): +```typescript +messagesQueue.addToResourcePolicy( + new iam.PolicyStatement({ + actions: ["sqs:SendMessage"], + principals: [new iam.ServicePrincipal("lambda.amazonaws.com")], + resources: [messagesQueue.queueArn], + }) +); +``` + +### 8.3 Lambda Function Access & Invocation + + +#### **Summary of Lambda Function Access** [Learn more](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-pools-working-with-lambda-triggers.html#:~:text=Except%20for%20Custom%20sender%20Lambda,attempts%2C%20the%20function%20times%20out.): + +| **Lambda Function** | **Access Level** | **Trigger/Invocation** | **Who Can Access?** | 
+|-------------------------------------|-------------------|-------------------------------------------|--------------------------------------------| +| `studentFunction` | Private | student | Authenticated users in **student** group | +| `instructorFunction` | Private | instructor | Authenticated users in **instructor** group| +| `adminFunction` | Private | admin | Authenticated users in **admin** group | +| `preSignupLambda` | Private | Cognito **Pre-Sign-Up** trigger | **Cognito internal trigger** only | +| `addStudentOnSignUp` | Private | Cognito **Post-Confirmation** trigger | **Cognito internal trigger** only | +| `adjustUserRoles` | Private | Cognito **Post-Authentication** trigger | **Cognito internal trigger** only | +| `TextGenLambdaDockerFunc` | Private | student | **student** group users | +| `GeneratePreSignedURLFunc` | Private | instructor | **instructor** group users | +| `DataIngestLambdaDockerFunc` | Private | S3 Event (S3 PUT/DELETE) | Triggered by **S3 events** only | +| `GetFilesFunction` | Private | instructor | **instructor** group users | +| `DeleteFileFunc` | Private | instructor | **instructor** group users | +| `DeleteModuleFunc` | Private | instructor | **instructor** group users | +| `DeleteLastMessage` | Private | student | **student** group users | +| `adminLambdaAuthorizer` | Private | API Gateway Lambda Authorizer (admin) | Internal to **API Gateway** for auth checks| +| `studentLambdaAuthorizer` | Private | API Gateway Lambda Authorizer (student) | Internal to **API Gateway** for auth checks| +| `instructorLambdaAuthorizer` | Private | API Gateway Lambda Authorizer (instructor)| Internal to **API Gateway** for auth checks| + + + + +## 9. Cognito User Authentication + +### 9.1 Purpose + +AWS Cognito provides user authentication and authorization, enabling **secure access** to Lambda functions based on user roles. 
By integrating Cognito with Lambda, we ensure that **only authenticated users** with the **appropriate permissions** can invoke Lambda functions, maintaining the **principle of least privilege** + +[Learn more](https://docs.aws.amazon.com/cognito/latest/developerguide/authentication.html) + +--- + +### 9.2 How Cognito Secures Lambda Invocations + +- **User Pool Creation:** + Cognito **User Pools** manage user registration and sign-in + - Supports multi-role user groups (e.g., student, instructor, admin) + - Automatic verification of user credentials + +- **Role-Based Access Control (RBAC):** + Cognito assigns **IAM roles** based on user groups, allowing **fine-grained access control** to specific Lambda functions. + - Example roles: `StudentRole`, `InstructorRole`, `AdminRole` + - IAM policies attached to each role define permitted Lambda invocations + +- **Lambda Integration:** + Cognito-generated **JWT tokens** are validated by Lambda **authorizer functions** to ensure: + - Only **authorized users** can invoke specific Lambda endpoints + - **JWT tokens** expire 30 days after a user signs in [Learn more](https://docs.aws.amazon.com/cognito/latest/developerguide/amazon-cognito-user-pools-using-tokens-with-identity-providers.html) + - Access is logged and monitored via **CloudWatch** + +--- + + +### 9.3 Cognito Integration in CDK (ApiGatewayStack) + +#### **User Pool & App Client Configuration:** +```typescript +const userPool = new cognito.UserPool(this, 'UserPool', { + signInAliases: { email: true }, + selfSignUpEnabled: true, + userVerification: { emailStyle: cognito.VerificationEmailStyle.CODE }, + passwordPolicy: { minLength: 8, requireLowercase: true, requireUppercase: true }, +}); + +const appClient = userPool.addClient('AppClient', { + authFlows: { userPassword: true, userSrp: true }, +}); + +``` + +## 10 API Gateway Security + +### 10.1 Purpose + +AWS API Gateway acts as the entry point for clients, enabling secure, scalable, and managed API interactions. 
It integrates with AWS IAM, Cognito, and Lambda Authorizers to enforce authentication and authorization. + +### 10.2 Security Measures Applied: +- Cognito User Pools for authentication +- IAM Policies to enforce least-privilege access +- Lambda Authorizers for custom permission validation +- AWS WAF to mitigate DDoS attacks and malicious traffic + + +[Learn more](https://docs.aws.amazon.com/apigateway/latest/developerguide/security.html) + +### 10.3 Custom Lambda Authorizer for API Gateway + +Lambda Authorizers provide fine-grained access control by validating requests before they reach the API Gateway methods. This allows us to enforce custom authentication and authorization logic, such as role-based access control (RBAC) or JSON Web Token (JWT) validation + +```typescript +const lambdaAuthorizer = new apigateway.TokenAuthorizer(this, 'LambdaAuth', { + handler: myLambdaAuthorizer, + identitySource: 'method.request.header.Authorization', +}); + +const restrictedResource = api.root.addResource('restricted'); +restrictedResource.addMethod('POST', new apigateway.LambdaIntegration(myLambda), { + authorizationType: apigateway.AuthorizationType.CUSTOM, + authorizer: lambdaAuthorizer, +}); +``` + +**Key Features of Lambda Authorizer**: +- Custom Authentication: Uses a Lambda function to validate JWT tokens or other credentials before granting access +- Identity Source: Extracts authentication data from the Authorization header in HTTP requests +- Granular Access Control: Ensures that only authorized users can access specific API methods + + +## 12 AWS Systems Manager Security + + +### 12.1 Purpose + +AWS Systems Manager (SSM) provides centralized management, automation, and security enforcement for AWS resources, helping secure access to infrastructure and maintain compliance through patching, logging, and configuration management + +Amazon CloudWatch is used to monitor AWS resources, applications, and security logs. 
It plays a crucial role in performance monitoring [Learn more](https://aws.amazon.com/systems-manager/) + + +### 12.2 Security Measures: +- Parameter Store Access Logs: Monitors access to sensitive configuration data, including secrets and API keys +- Anomalous Parameter Store Access: Triggers alarms when unauthorized services or users attempt to retrieve sensitive parameters + + + + ## 13 AWS Key Management Service (KMS) + + + ### 13.1 Purpose + +AWS KMS (Key Management Service) provides centralized encryption key management, helping to keep sensitive data encrypted both at rest and in transit. It integrates with multiple AWS services to offer scalable and automated encryption security [Learn more](https://aws.amazon.com/kms/) + +### 13.2 Security Measures: +- KMS Key Access Logs: Records every use of an encryption key to help detect unauthorized decryption attempts +- Excessive Key Usage: Triggers alerts if a specific key is accessed more than usual, potentially indicating a compromise diff --git a/docs/userGuide.md b/docs/userGuide.md index ca941b4..10a4620 100644 --- a/docs/userGuide.md +++ b/docs/userGuide.md @@ -78,6 +78,12 @@ Clicking the "View Students" tab leads to a page where the instructor can view a The instructor can then click on a student which takes them to that student's chat logs for every module in the course. Each tab represents a different module. Different conversations with the LLM are rendered as different drop downs: ![image](./images/instructor-view-student-logs.png) + + +Clicking the "Chat History" tab directs the instructor to a dedicated page where they can request a document containing the chat logs for all course sessions. Once the chat log file has been successfully generated, it will be displayed on this page, complete with the generation timestamp and a download option, allowing the instructor to easily access and save the chat logs locally if desired. 
If the user navigates to another screen while the request is processing, they will receive a notification once the file is ready, indicating that the chat logs are available for download: +![image](./images/instructor-chat-history.png) + + ## Student View Upon logging in as an student, they see the following home page: ![image](./images/student-home-page.png) diff --git a/frontend/package-lock.json b/frontend/package-lock.json index 96536f3..8a12e59 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -17,7 +17,7 @@ "@aws-sdk/types": "^3.609.0", "@emotion/react": "^11.11.4", "@emotion/styled": "^11.11.5", - "@mui/icons-material": "^5.16.0", + "@mui/icons-material": "^5.16.13", "@mui/material": "^5.15.21", "@smithy/eventstream-codec": "^3.1.2", "@smithy/protocol-http": "^4.1.0", @@ -36,11 +36,12 @@ "react": "^18.3.1", "react-beautiful-dnd": "^13.1.1", "react-dom": "^18.3.1", - "react-icons": "^5.2.1", + "react-icons": "^4.9.0", "react-router-dom": "^6.24.1", "react-syntax-highlighter": "^15.5.0", "react-toastify": "^10.0.5", - "recharts": "^2.12.7" + "recharts": "^2.12.7", + "u": "^0.1.0" }, "devDependencies": { "@types/node": "^22.0.2", @@ -3981,9 +3982,10 @@ } }, "node_modules/@mui/icons-material": { - "version": "5.16.7", - "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.16.7.tgz", - "integrity": "sha512-UrGwDJCXEszbDI7yV047BYU5A28eGJ79keTCP4cc74WyncuVrnurlmIRxaHL8YK+LI1Kzq+/JM52IAkNnv4u+Q==", + "version": "5.16.13", + "resolved": "https://registry.npmjs.org/@mui/icons-material/-/icons-material-5.16.13.tgz", + "integrity": "sha512-aWyOgGDEqj37m3K4F6qUfn7JrEccwiDynJtGQMFbxp94EqyGwO13TKcZ4O8aHdwW3tG63hpbION8KyUoBWI4JQ==", + "license": "MIT", "dependencies": { "@babel/runtime": "^7.23.9" }, @@ -3996,8 +3998,8 @@ }, "peerDependencies": { "@mui/material": "^5.0.0", - "@types/react": "^17.0.0 || ^18.0.0", - "react": "^17.0.0 || ^18.0.0" + "@types/react": "^17.0.0 || ^18.0.0 || ^19.0.0", + "react": "^17.0.0 || 
^18.0.0 || ^19.0.0" }, "peerDependenciesMeta": { "@types/react": { @@ -10004,9 +10006,10 @@ } }, "node_modules/react-icons": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-5.3.0.tgz", - "integrity": "sha512-DnUk8aFbTyQPSkCfF8dbX6kQjXA9DktMeJqfjrg6cK9vwQVMxmcA3BfP4QoiztVmEHtwlTgLFsPuH2NskKT6eg==", + "version": "4.9.0", + "resolved": "https://registry.npmjs.org/react-icons/-/react-icons-4.9.0.tgz", + "integrity": "sha512-ijUnFr//ycebOqujtqtV9PFS7JjhWg0QU6ykURVHuL4cbofvRCf3f6GMn9+fBktEFQOIVZnuAYLZdiyadRQRFg==", + "license": "MIT", "peerDependencies": { "react": "*" } @@ -11114,6 +11117,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/u": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/u/-/u-0.1.0.tgz", + "integrity": "sha512-johjpjhqy9UE97vjDFq8kW6F1777vIz0i+AZXzisbW6yn6Aa8iX3V7bW4W6GzI7sbw3HjYuvN2yMx2k6QwX5ng==", + "engines": { + "node": "*" + } + }, "node_modules/ua-parser-js": { "version": "0.7.38", "resolved": "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.38.tgz", diff --git a/frontend/package.json b/frontend/package.json index 499ae1c..436a29c 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -19,7 +19,7 @@ "@aws-sdk/types": "^3.609.0", "@emotion/react": "^11.11.4", "@emotion/styled": "^11.11.5", - "@mui/icons-material": "^5.16.0", + "@mui/icons-material": "^5.16.13", "@mui/material": "^5.15.21", "@smithy/eventstream-codec": "^3.1.2", "@smithy/protocol-http": "^4.1.0", @@ -38,11 +38,12 @@ "react": "^18.3.1", "react-beautiful-dnd": "^13.1.1", "react-dom": "^18.3.1", - "react-icons": "^5.2.1", + "react-icons": "^4.9.0", "react-router-dom": "^6.24.1", "react-syntax-highlighter": "^15.5.0", "react-toastify": "^10.0.5", - "recharts": "^2.12.7" + "recharts": "^2.12.7", + "u": "^0.1.0" }, "devDependencies": { "@types/node": "^22.0.2", diff --git a/frontend/src/App.jsx b/frontend/src/App.jsx index 26930a2..2136f16 100644 --- a/frontend/src/App.jsx +++ 
b/frontend/src/App.jsx @@ -20,6 +20,7 @@ import StudentChat from "./pages/student/StudentChat"; import AdminHomepage from "./pages/admin/AdminHomepage"; import InstructorHomepage from "./pages/instructor/InstructorHomepage"; import CourseView from "./pages/student/CourseView"; +import { NotificationProvider } from "./context/NotificationContext"; export const UserContext = createContext(); @@ -86,41 +87,43 @@ function App() { }; return ( - - - - : } - /> - - } - /> - - } - /> - - } /> - - - + + + + + : } + /> + + } + /> + + } + /> + + } /> + + + + ); } diff --git a/frontend/src/context/NotificationContext.jsx b/frontend/src/context/NotificationContext.jsx new file mode 100644 index 0000000..6c7abda --- /dev/null +++ b/frontend/src/context/NotificationContext.jsx @@ -0,0 +1,25 @@ +import React, { createContext, useState, useContext } from "react"; + +const NotificationContext = createContext(); + +export const NotificationProvider = ({ children }) => { + const [notifications, setNotifications] = useState({}); + + const setNotificationForCourse = (courseId, hasNotification) => { + setNotifications((prev) => ({ ...prev, [courseId]: hasNotification })); + }; + + return ( + + {children} + + ); +}; + +export const useNotification = () => { + const context = useContext(NotificationContext); + if (!context) { + throw new Error("useNotification must be used within a NotificationProvider"); + } + return context; +}; \ No newline at end of file diff --git a/frontend/src/pages/Login.jsx b/frontend/src/pages/Login.jsx index a2245e4..6e76328 100644 --- a/frontend/src/pages/Login.jsx +++ b/frontend/src/pages/Login.jsx @@ -126,90 +126,92 @@ export const Login = () => { } }; - // user signs up const handleSignUp = async (event) => { event.preventDefault(); - if ( - username == "" || - password == "" || - confirmPassword == "" || - firstName == "" || - lastName == "" - ) { + + // Check for empty fields + if (!username || !password || !confirmPassword || !firstName || !lastName) { 
toast.error("All fields are required", { position: "top-center", autoClose: 3000, - hideProgressBar: false, - closeOnClick: true, - pauseOnHover: true, - draggable: true, - progress: undefined, theme: "colored", }); return; } - // password specifications + + // Password validation: match, length, uppercase, lowercase, and number if (password !== confirmPassword) { setPasswordError("Passwords do not match"); - toast.error("Passwords do not match", { - position: "top-center", - autoClose: 3000, - hideProgressBar: false, - closeOnClick: true, - pauseOnHover: true, - draggable: true, - progress: undefined, - theme: "colored", - }); + toast.error("Passwords do not match", { theme: "colored" }); return; - } else if (password.length < 8) { + } + + if (password.length < 8) { setPasswordError("Password must be at least 8 characters long"); - toast.error("Password must be at least 8 characters long", { - position: "top-center", - autoClose: 3000, - hideProgressBar: false, - closeOnClick: true, - pauseOnHover: true, - draggable: true, - progress: undefined, - theme: "colored", - }); + toast.error("Password must be at least 8 characters long", { theme: "colored" }); return; } - setPasswordError(""); + + if (!/[a-z]/.test(password)) { + setPasswordError("Password must contain at least one lowercase letter"); + toast.error("Password must contain at least one lowercase letter", { theme: "colored" }); + return; + } + + if (!/[A-Z]/.test(password)) { + setPasswordError("Password must contain at least one uppercase letter"); + toast.error("Password must contain at least one uppercase letter", { theme: "colored" }); + return; + } + + if (!/[0-9]/.test(password)) { + setPasswordError("Password must contain at least one number"); + toast.error("Password must contain at least one number", { theme: "colored" }); + return; + } + + setPasswordError(""); // Clear any previous errors + try { setLoading(true); - const { isSignUpComplete, userId, nextStep } = await signUp({ + 
console.log("signing up"); + + const { isSignUpComplete, nextStep } = await signUp({ username: username, password: password, attributes: { email: username, }, }); + + console.log("signed up successfully:", isSignUpComplete, nextStep); + setNewSignUp(false); - if (!isSignUpComplete) { - if (nextStep.signUpStep === "CONFIRM_SIGN_UP") { - setSignUpConfirmation(true); - setLoading(false); - } + if (!isSignUpComplete && nextStep?.signUpStep === "CONFIRM_SIGN_UP") { + setSignUpConfirmation(true); // Transition to confirmation UI + toast.success("Account created. Check your email for the confirmation code.", { + theme: "colored", + }); } } catch (error) { - toast.error(`Error signing up: ${error}`, { + const errorMessage = + error.message.includes("PreSignUp failed with error") + ? "Your email domain is not allowed. Please use a valid email address." + : `Error signing up: ${error.message}`; + toast.error(errorMessage, { position: "top-center", autoClose: 3000, - hideProgressBar: false, - closeOnClick: true, - pauseOnHover: true, - draggable: true, - progress: undefined, theme: "colored", }); console.log("Error signing up:", error); setLoading(false); setError(error.message); + } finally { + setLoading(false); } }; + // user gets new password const handleNewUserPassword = async (event) => { event.preventDefault(); @@ -281,8 +283,7 @@ export const Login = () => { const token = session.tokens.idToken const response = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }student/create_user?user_email=${encodeURIComponent( username )}&username=${encodeURIComponent( @@ -535,7 +536,7 @@ export const Login = () => { )} {newSignUp && ( - + { display: "flex", flexDirection: "column", alignItems: "center", + margin: "0 auto", // Center the content horizontally + justifyContent: "center", // Center the content vertically + }} > @@ -764,123 +768,157 @@ export const Login = () => { )} {/* forgot password? 
*/} {!loading && forgotPassword && ( - + - + {/* Title */} + Reset Password + + {/* Request Reset */} {step === "requestReset" && ( <> - - setUsername(e.target.value)} - fullWidth - margin="normal" - inputProps={{ maxLength: 40 }} - /> - + setUsername(e.target.value)} + fullWidth + margin="normal" + inputProps={{ maxLength: 40 }} + sx={{ + fontSize: "1rem", // Ensure input matches font size + }} + /> )} + + {/* Confirm Reset */} {step === "confirmReset" && ( - - + setConfirmationCode(e.target.value)} + fullWidth + margin="normal" + inputProps={{ maxLength: 15 }} + sx={{ fontSize: "1rem" }} + /> + setNewPassword(e.target.value)} + fullWidth + margin="normal" + inputProps={{ maxLength: 50 }} + sx={{ fontSize: "1rem" }} + /> + - - + Reset Password + + )} + + {/* Success Message */} {step === "done" && ( - + Password has been successfully reset. )} + + {/* Error Message */} {error && ( - + {error} )} - - - setForgotPassword(false)} - > - Remember your Password? {"Sign in"} - - - + + {/* Remember Password Link */} + setForgotPassword(false)} + sx={{ + mt: 3, + textAlign: "center", + display: "block", + fontSize: "1rem", + fontWeight: "bold", + color: "primary.main", // Match link color + }} + > + Remember your Password? 
Sign in + )} + + { + if (index === 0) { + return word.toUpperCase(); + } else { + return word.charAt(0).toUpperCase() + word.slice(1).toLowerCase(); + } + }) + .join(" "); +} + +export const ChatLogs = ({ courseName, course_id, openWebSocket }) => { + const [loading, setLoading] = useState(false); + const [isDownloadButtonEnabled, setIsDownloadButtonEnabled] = useState(false); + const [previousChatLogs, setPreviousChatLogs] = useState([]); + const { setNotificationForCourse } = useNotification(); + + useEffect(() => { + checkNotificationStatus(); + fetchChatLogs(); + + // Auto-refresh logs every 5 minutes since presigned URLs expire + const interval = setInterval(fetchChatLogs, 5 * 60 * 1000); + return () => clearInterval(interval); + }, [course_id]); + + const checkNotificationStatus = async () => { + try { + const session = await fetchAuthSession(); + const token = session.tokens.idToken; + const { email } = await fetchUserAttributes(); + const response = await fetch( + `${import.meta.env.VITE_API_ENDPOINT + }instructor/check_notifications_status?course_id=${encodeURIComponent( + course_id + )}&instructor_email=${encodeURIComponent(email)}`, + { + method: "GET", + headers: { + Authorization: token, + "Content-Type": "application/json", + }, + } + ); + if (response.ok) { + const data = await response.json(); + console.log(`Download Chatlogs is ${data.isEnabled}`) + setIsDownloadButtonEnabled(data.isEnabled); + } else { + console.error("Failed to fetch notification status:", response.statusText); + } + } catch (error) { + console.error("Error checking notification status:", error); + } + }; + + const fetchChatLogs = async () => { + try { + setLoading(true); + const session = await fetchAuthSession(); + const token = session.tokens.idToken; + const { email } = await fetchUserAttributes(); + + const response = await fetch( + 
`${import.meta.env.VITE_API_ENDPOINT}instructor/fetch_chatlogs?course_id=${encodeURIComponent(course_id)}&instructor_email=${encodeURIComponent(email)}`, + { + method: "GET", + headers: { + Authorization: token, + "Content-Type": "application/json", + }, + } + ); + + if (response.ok) { + const data = await response.json(); + console.log("Chat logs fetched:", data); + if (data.log_files) { + const formattedLogs = Object.entries(data.log_files).map(([fileName, presignedUrl]) => ({ + date: convertToLocalTime(fileName), // Using file name as the date + presignedUrl: presignedUrl, + })); + setPreviousChatLogs(formattedLogs); + } else { + setPreviousChatLogs([]); + } + } else { + console.error("Failed to fetch chat logs:", response.statusText); + } + } catch (error) { + console.error("Error fetching chat logs:", error); + } finally { + setLoading(false); + } + }; + + const convertToLocalTime = (fileName) => { + try { + // Extract timestamp from file name (assuming format: "YYYY-MM-DD HH:MM:SS.csv") + const match = fileName.match(/(\d{4}-\d{2}-\d{2}) (\d{2}):(\d{2}):(\d{2})/); + if (!match) { + console.warn("Could not extract a valid timestamp from filename:", fileName); + return fileName; // Return original name if no timestamp found + } + + // Extract date components + const [_, datePart, hours, minutes, seconds] = match; + const [year, month, day] = datePart.split("-").map(Number); + + // Create a new Date object with UTC time + const utcDate = new Date(Date.UTC(year, month - 1, day, hours, minutes, seconds)); + + // Convert to user's local time + return utcDate.toLocaleString(undefined, { timeZoneName: "short" }); + + } catch (error) { + console.error("Error converting time:", error); + return fileName; // Fallback in case of error + } + }; + + + + const downloadChatLog = (presignedUrl) => { + try { + console.log("Downloading file from:", presignedUrl); + window.open(presignedUrl, "_blank"); + } catch (error) { + console.error("Error downloading file:", error); + } + 
}; + + const generateCourseMessages = async () => { + try { + console.log("openWebSocket function:", openWebSocket); + if (typeof openWebSocket !== "function") { + console.error("Error: openWebSocket is not a function!"); + return; + } + setIsDownloadButtonEnabled(false); + const session = await fetchAuthSession(); + const token = session.tokens.idToken; + const { email } = await fetchUserAttributes(); + const request_id = uuidv4(); + + const response = await fetch( + `${import.meta.env.VITE_API_ENDPOINT}instructor/course_messages`, + { + method: "POST", + headers: { + Authorization: token, + "Content-Type": "application/json", + }, + body: JSON.stringify({ + course_id: course_id, + instructor_email: email, + request_id: request_id, + }), + } + ); + + if (response.ok) { + console.log(response) + const data = await response.json(); + console.log("Job submitted successfully:", data); + + // Invoke global WebSocket function from InstructorHomepage and delay checkNotificationStatus slightly + openWebSocket(courseName, course_id, request_id, setNotificationForCourse, () => { + console.log("Waiting before checking notification status..."); + setTimeout(() => { + checkNotificationStatus(); + fetchChatLogs(); // Fetch latest chat logs after WebSocket completes + }, 2000); // Wait 2 seconds before checking + }); + } else { + console.error("Failed to submit job:", response.statusText); + } + } catch (error) { + console.error("Error submitting job:", error); + } + }; + + + return ( +
+ + + + + {courseName} Chat Logs + + + + + + + {loading ? ( + + Loading chat logs... + + ) : null } + {!loading && previousChatLogs.length > 0 && ( + + + + + Date + Download + + + + {previousChatLogs.map((log, index) => ( + + {log.date} + + + + + ))} + +
+
+ )} +
+ +
+
+ ); +}; + +export default ChatLogs; \ No newline at end of file diff --git a/frontend/src/pages/instructor/CourseDetails.jsx b/frontend/src/pages/instructor/CourseDetails.jsx index d11c064..3600b5e 100644 --- a/frontend/src/pages/instructor/CourseDetails.jsx +++ b/frontend/src/pages/instructor/CourseDetails.jsx @@ -11,7 +11,7 @@ import ViewStudents from "./ViewStudents"; import InstructorModules from "./InstructorModules"; // course details page -const CourseDetails = () => { +const CourseDetails = ({ openWebSocket }) => { const { courseId } = useParams(); const [selectedComponent, setSelectedComponent] = useState( "InstructorAnalytics" @@ -66,6 +66,8 @@ const CourseDetails = () => { return ; case "ViewStudents": return ; + case "ChatLogs": + return ; default: return ( @@ -81,7 +83,7 @@ const CourseDetails = () => { > - + {renderComponent()} ); diff --git a/frontend/src/pages/instructor/InstructorAnalytics.jsx b/frontend/src/pages/instructor/InstructorAnalytics.jsx index c44b902..c9569e8 100644 --- a/frontend/src/pages/instructor/InstructorAnalytics.jsx +++ b/frontend/src/pages/instructor/InstructorAnalytics.jsx @@ -227,4 +227,4 @@ const InstructorAnalytics = ({ courseName, course_id }) => { ); }; -export default InstructorAnalytics; +export default InstructorAnalytics; \ No newline at end of file diff --git a/frontend/src/pages/instructor/InstructorConcepts.jsx b/frontend/src/pages/instructor/InstructorConcepts.jsx index 429d9f8..ffb0c04 100644 --- a/frontend/src/pages/instructor/InstructorConcepts.jsx +++ b/frontend/src/pages/instructor/InstructorConcepts.jsx @@ -116,13 +116,13 @@ const InstructorConcepts = ({ courseName, course_id }) => { }); const handleEditClick = (conceptData) => { - navigate(`/course/${courseName}/edit-concept/${conceptData.concept_id}`, { + navigate(`/course/${courseName}/${course_id}/edit-concept/${conceptData.concept_id}`, { state: { conceptData, course_id: course_id }, }); }; const handleCreateConceptClick = () => { - 
navigate(`/course/${courseName}/new-concept`, { + navigate(`/course/${courseName}/${course_id}/new-concept`, { state: { data, course_id }, }); }; diff --git a/frontend/src/pages/instructor/InstructorEditCourse.jsx b/frontend/src/pages/instructor/InstructorEditCourse.jsx index 79634c0..6747ed4 100644 --- a/frontend/src/pages/instructor/InstructorEditCourse.jsx +++ b/frontend/src/pages/instructor/InstructorEditCourse.jsx @@ -98,8 +98,7 @@ const InstructorEditCourse = () => { try { const { token, email } = await getAuthSessionAndEmail(); const response = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }instructor/get_all_files?course_id=${encodeURIComponent( course_id )}&module_id=${encodeURIComponent( @@ -129,8 +128,7 @@ const InstructorEditCourse = () => { try { const { token, email } = await getAuthSessionAndEmail(); const response = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }instructor/view_concepts?course_id=${encodeURIComponent(course_id)}`, { method: "GET", @@ -170,8 +168,7 @@ const InstructorEditCourse = () => { const session = await fetchAuthSession(); const token = session.tokens.idToken const s3Response = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }instructor/delete_module_s3?course_id=${encodeURIComponent( course_id )}&module_id=${encodeURIComponent( @@ -190,8 +187,7 @@ const InstructorEditCourse = () => { throw new Error("Failed to delete module from S3"); } const moduleResponse = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }instructor/delete_module?module_id=${encodeURIComponent( module.module_id )}`, @@ -260,8 +256,7 @@ const InstructorEditCourse = () => { const { token, email } = await getAuthSessionAndEmail(); const editModuleResponse = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT 
}instructor/edit_module?module_id=${encodeURIComponent( module.module_id )}&instructor_email=${encodeURIComponent( @@ -291,8 +286,7 @@ const InstructorEditCourse = () => { const fileType = getFileType(file_name); const fileName = cleanFileName(removeFileExtension(file_name)); return fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }instructor/delete_file?course_id=${encodeURIComponent( course_id )}&module_id=${encodeURIComponent( @@ -325,8 +319,7 @@ const InstructorEditCourse = () => { try { const response = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }instructor/generate_presigned_url?course_id=${encodeURIComponent( course_id )}&module_id=${encodeURIComponent( @@ -380,6 +373,18 @@ const InstructorEditCourse = () => { if (isSaving) return; setIsSaving(true); + + const totalFiles = files.length + newFiles.length; + if (totalFiles === 0) { + toast.error("At least one file is required to save the module.", { + position: "top-center", + autoClose: 2000, + theme: "colored", + }); + setIsSaving(false); + return; + } + if (!moduleName || !concept) { toast.error("Module Name and Concept are required.", { position: "top-center", @@ -393,6 +398,8 @@ const InstructorEditCourse = () => { }); return; } + + try { await updateModule(); const { token } = await getAuthSessionAndEmail(); @@ -445,8 +452,7 @@ const InstructorEditCourse = () => { ); const fileType = getFileType(fileNameWithExtension); return fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }instructor/update_metadata?module_id=${encodeURIComponent( module.module_id )}&filename=${encodeURIComponent( diff --git a/frontend/src/pages/instructor/InstructorHomepage.jsx b/frontend/src/pages/instructor/InstructorHomepage.jsx index 8f89926..d5d0122 100644 --- a/frontend/src/pages/instructor/InstructorHomepage.jsx +++ b/frontend/src/pages/instructor/InstructorHomepage.jsx @@ -1,4 +1,4 @@ -import React, 
{ useState, useEffect, useContext } from "react"; +import React, { useState, useEffect, useContext, useRef } from "react"; import { Routes, Route, @@ -24,6 +24,7 @@ import { TablePagination, Button, } from "@mui/material"; +import { v4 as uuidv4 } from 'uuid'; import PageContainer from "../Container"; import InstructorHeader from "../../components/InstructorHeader"; import InstructorSidebar from "./InstructorSidebar"; @@ -37,7 +38,10 @@ import StudentDetails from "./StudentDetails"; import InstructorNewConcept from "./InstructorNewConcept"; import InstructorConcepts from "./InstructorConcepts"; import InstructorEditConcept from "./InstructorEditConcept"; +import ChatLogs from "./ChatLogs"; +import { useNotification } from "../../context/NotificationContext"; import { UserContext } from "../../App"; + function titleCase(str) { if (typeof str !== "string") { return str; @@ -51,14 +55,141 @@ function titleCase(str) { .join(" "); } +function constructWebSocketUrl() { + const tempUrl = import.meta.env.VITE_GRAPHQL_WS_URL; // Replace with your WebSocket URL + const apiUrl = tempUrl.replace("https://", "wss://"); + const urlObj = new URL(apiUrl); + const tmpObj = new URL(tempUrl); + const modifiedHost = urlObj.hostname.replace( + "appsync-api", + "appsync-realtime-api" + ); + + urlObj.hostname = modifiedHost; + const host = tmpObj.hostname; + const header = { + host: host, + Authorization: `API_KEY=${import.meta.env.VITE_API_KEY}`, + }; + + const encodedHeader = btoa(JSON.stringify(header)); + const payload = "e30="; + + return `${urlObj.toString()}?header=${encodedHeader}&payload=${payload}`; +}; + +const removeCompletedNotification = async (course_id) => { + try { + console.log(course_id) + const session = await fetchAuthSession(); + const token = session.tokens.idToken; + const { email } = await fetchUserAttributes(); + const response = await fetch( + 
`${import.meta.env.VITE_API_ENDPOINT}instructor/remove_completed_notification?course_id=${encodeURIComponent(course_id)}&instructor_email=${encodeURIComponent(email)}`, + { + method: "DELETE", + headers: { Authorization: token, "Content-Type": "application/json" }, + } + ); + + if (response.ok) { + console.log("Notification removed successfully."); + } else { + console.error("Failed to remove notification:", response.statusText); + } + } catch (error) { + console.error("Error removing completed notification:", error); + } +}; + +function openWebSocket(courseName, course_id, requestId, setNotificationForCourse, onComplete) { + // Open WebSocket connection + const wsUrl = constructWebSocketUrl(); + const ws = new WebSocket(wsUrl, "graphql-ws"); + + // Handle WebSocket connection + ws.onopen = () => { + console.log("WebSocket connection established"); + + // Initialize WebSocket connection + const initMessage = { type: "connection_init" }; + ws.send(JSON.stringify(initMessage)); + + // Subscribe to notifications + const subscriptionId = uuidv4(); + const subscriptionMessage = { + id: subscriptionId, + type: "start", + payload: { + data: `{"query":"subscription OnNotify($request_id: String!) 
{ onNotify(request_id: $request_id) { message request_id } }","variables":{"request_id":"${requestId}"}}`, + extensions: { + authorization: { + Authorization: `API_KEY=${import.meta.env.VITE_API_KEY}`, + host: new URL(import.meta.env.VITE_GRAPHQL_WS_URL).hostname, + }, + }, + }, + }; + + ws.send(JSON.stringify(subscriptionMessage)); + console.log("Subscribed to WebSocket notifications"); + }; + + ws.onmessage = (event) => { + const message = JSON.parse(event.data); + console.log("WebSocket message received:", message); + + // Handle notification + if (message.type === "data" && message.payload?.data?.onNotify) { + const receivedMessage = message.payload.data.onNotify.message; + console.log("Notification received:", receivedMessage); + + // Sets icon to show new file on ChatLogs page + setNotificationForCourse(course_id, true); + + // Remove row from database + removeCompletedNotification(course_id); + + // Notify the instructor + alert(`Chat logs are now available for ${courseName}`); + + // Close WebSocket after receiving the notification + ws.close(); + console.log("WebSocket connection closed after handling notification"); + + // Call the callback function after WebSocket completes + if (typeof onComplete === "function") { + onComplete(); + } + } + }; + + ws.onerror = (error) => { + console.error("WebSocket error:", error); + ws.close(); + }; + + ws.onclose = () => { + console.log("WebSocket closed"); + }; + + // Set a timeout to close the WebSocket if no message is received + setTimeout(() => { + if (ws && ws.readyState === WebSocket.OPEN) { + console.warn("WebSocket timeout reached, closing connection"); + ws.close(); + } + }, 180000); +}; + // course details page const CourseDetails = () => { const location = useLocation(); - const { courseName } = useParams(); const [selectedComponent, setSelectedComponent] = useState( "InstructorAnalytics" ); - const { course_id } = location.state; + + const { courseName, course_id } = useParams(); const renderComponent = 
() => { switch (selectedComponent) { @@ -72,12 +203,18 @@ const CourseDetails = () => { ); case "InstructorEditConcepts": return ( - + ); case "PromptSettings": return ; case "ViewStudents": return ; + case "ChatLogs": + return ; default: return ( @@ -85,6 +222,7 @@ const CourseDetails = () => { } }; + return ( { > - + {renderComponent()} ); @@ -112,8 +250,10 @@ const InstructorHomepage = () => { const [searchQuery, setSearchQuery] = useState(""); const [page, setPage] = useState(0); const [rowsPerPage, setRowsPerPage] = useState(5); - const [courseData, setCourseData] = useState([]); + const [courseData, setCourseData] = useState([]); const { isInstructorAsStudent } = useContext(UserContext); + const { setNotificationForCourse } = useNotification(); + const hasFetched = useRef(false); const navigate = useNavigate(); useEffect(() => { @@ -123,14 +263,15 @@ const InstructorHomepage = () => { }, [isInstructorAsStudent, navigate]); // connect to api data useEffect(() => { + if (hasFetched.current) return; + const fetchCourses = async () => { try { const session = await fetchAuthSession(); - var token = session.tokens.idToken + var token = session.tokens.idToken; const { email } = await fetchUserAttributes(); const response = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT + `${import.meta.env.VITE_API_ENDPOINT }instructor/courses?email=${encodeURIComponent(email)}`, { method: "GET", @@ -150,6 +291,7 @@ const InstructorHomepage = () => { id: course.course_id, })); setRows(formattedData); + checkNotificationStatus(data, email, token); } else { console.error("Failed to fetch courses:", response.statusText); } @@ -159,8 +301,47 @@ const InstructorHomepage = () => { }; fetchCourses(); + hasFetched.current = true; }, []); + const checkNotificationStatus = async (courses, email, token) => { + for (const course of courses) { + try { + const response = await fetch( + 
`${import.meta.env.VITE_API_ENDPOINT}instructor/check_notifications_status?course_id=${encodeURIComponent(course.course_id)}&instructor_email=${encodeURIComponent(email)}`, + { + method: "GET", + headers: { Authorization: token, "Content-Type": "application/json" }, + } + ); + if (response.ok) { + const data = await response.json(); + if (data.completionStatus === true) { + console.log(`Getting chatlogs for ${course.course_name} is completed. Notifying the user and removing row from database.`); + + // Sets icon to show new file on ChatLogs page + setNotificationForCourse(course.course_id, true); + + // Remove row from database + removeCompletedNotification(course.course_id); + + // Notify the Instructor + alert(`Chat logs are available for course: ${course.course_name}`); + + } else if (data.completionStatus === false) { + // Reopen WebSocket to listen for notifications + console.log(`Getting chatlogs for ${course.course_name} is not completed. Re-opening the websocket.`); + openWebSocket(course.course_name, course.course_id, data.requestId, setNotificationForCourse); + } else { + console.log(`Either chatlogs for ${course.course_name} were not requested or instructor already received notification. 
No need to notify instructor or re-open websocket.`); + } + } + } catch (error) { + console.error("Error checking notification status for", course.course_id, error); + } + } + }; + const handleSearchChange = (event) => { setSearchQuery(event.target.value); }; @@ -185,8 +366,10 @@ const InstructorHomepage = () => { if (course) { const { course_id, course_department, course_number } = course; - const path = `/course/${course_department} ${course_number} ${courseName.trim()}`; - navigate(path, { state: { course_id } }); + // After – include course_id as a URL parameter + const path = `/course/${encodeURIComponent(`${course_department} ${course_number} ${courseName.trim()}`)}/${course_id}`; + navigate(path); + } else { console.error("Course not found!"); } @@ -230,8 +413,13 @@ const InstructorHomepage = () => { onChange={handleSearchChange} sx={{ width: "100%", marginBottom: 2 }} /> - + @@ -293,26 +481,16 @@ const InstructorHomepage = () => { } /> - } /> - } - /> - } - /> - } /> - } - /> - } - /> + + } /> + } /> + } /> + } /> + } /> + } /> + ); }; -export default InstructorHomepage; +export default InstructorHomepage; \ No newline at end of file diff --git a/frontend/src/pages/instructor/InstructorModules.jsx b/frontend/src/pages/instructor/InstructorModules.jsx index 45f040d..a11f673 100644 --- a/frontend/src/pages/instructor/InstructorModules.jsx +++ b/frontend/src/pages/instructor/InstructorModules.jsx @@ -122,13 +122,13 @@ const InstructorModules = ({ courseName, course_id }) => { }, [course_id]); const handleEditClick = (moduleData) => { - navigate(`/course/${courseName}/edit-module/${moduleData.module_id}`, { + navigate(`/course/${courseName}/${course_id}/edit-module/${moduleData.module_id}`, { state: { moduleData, course_id: course_id }, }); }; const handleCreateModuleClick = () => { - navigate(`/course/${courseName}/new-module`, { + navigate(`/course/${courseName}/${course_id}/new-module`, { state: { data, course_id }, }); }; diff --git 
a/frontend/src/pages/instructor/InstructorNewModule.jsx b/frontend/src/pages/instructor/InstructorNewModule.jsx index ae23748..2e73a8a 100644 --- a/frontend/src/pages/instructor/InstructorNewModule.jsx +++ b/frontend/src/pages/instructor/InstructorNewModule.jsx @@ -160,6 +160,21 @@ export const InstructorNewModule = ({ courseId }) => { return; } + // Check if at least one file is uploaded + if (newFiles.length === 0) { + toast.error("At least one file must be uploaded.", { + position: "top-center", + autoClose: 2000, + hideProgressBar: false, + closeOnClick: true, + pauseOnHover: true, + draggable: true, + theme: "colored", + }); + return; + } + + setIsSaving(true); const selectedConcept = allConcepts.find((c) => c.concept_name === concept); diff --git a/frontend/src/pages/instructor/InstructorSidebar.jsx b/frontend/src/pages/instructor/InstructorSidebar.jsx index bd8a485..e813037 100644 --- a/frontend/src/pages/instructor/InstructorSidebar.jsx +++ b/frontend/src/pages/instructor/InstructorSidebar.jsx @@ -1,4 +1,4 @@ -import React from "react"; +import React, { useEffect, useState } from "react"; import { useNavigate } from "react-router-dom"; // MUI import { @@ -9,21 +9,28 @@ import { ListItemText, Divider, Box, + Badge, } from "@mui/material"; import HomeIcon from "@mui/icons-material/Home"; import ViewTimelineIcon from "@mui/icons-material/ViewTimeline"; import EditIcon from "@mui/icons-material/Edit"; import PsychologyIcon from "@mui/icons-material/Psychology"; import GroupIcon from "@mui/icons-material/Group"; +import DescriptionIcon from "@mui/icons-material/Description"; +import { useNotification } from "../../context/NotificationContext"; -const InstructorSidebar = ({ setSelectedComponent }) => { +const InstructorSidebar = ({ setSelectedComponent, course_id, selectedComponent }) => { const navigate = useNavigate(); + const { notifications, setNotificationForCourse } = useNotification(); const handleNavigation = (component) => { if (component === 
"InstructorAllCourses") { navigate("/home"); } else { setSelectedComponent(component); + if (component === "ChatLogs") { + setNotificationForCourse(course_id, false); + } } }; @@ -92,10 +99,23 @@ const InstructorSidebar = ({ setSelectedComponent }) => { + + handleNavigation("ChatLogs")}> + + + + + + + ); }; -export default InstructorSidebar; +export default InstructorSidebar; \ No newline at end of file diff --git a/frontend/src/pages/instructor/ViewStudents.jsx b/frontend/src/pages/instructor/ViewStudents.jsx index 9709ece..df05876 100644 --- a/frontend/src/pages/instructor/ViewStudents.jsx +++ b/frontend/src/pages/instructor/ViewStudents.jsx @@ -17,6 +17,7 @@ import { import { useState, useEffect } from "react"; import { useNavigate } from "react-router-dom"; import { fetchAuthSession, fetchUserAttributes } from "aws-amplify/auth"; +import { v4 as uuidv4 } from 'uuid'; // populate with dummy data const createData = (name, email) => { @@ -63,13 +64,13 @@ export const ViewStudents = ({ courseName, course_id }) => { const [accessCode, setAccessCode] = useState("loading..."); const navigate = useNavigate(); - const [allMessageData, setAllMessageData] = useState([]); useEffect(() => { const fetchCode = async () => { try { const session = await fetchAuthSession(); var token = session.tokens.idToken; + console.log(course_id) const response = await fetch( `${ import.meta.env.VITE_API_ENDPOINT @@ -97,11 +98,14 @@ export const ViewStudents = ({ courseName, course_id }) => { fetchCode(); }, [course_id]); + // retrieve analytics data useEffect(() => { const fetchStudents = async () => { try { + console.log("checkpoint1") const session = await fetchAuthSession(); + console.log("checkpoint2") var token = session.tokens.idToken; const response = await fetch( `${ @@ -136,63 +140,14 @@ export const ViewStudents = ({ courseName, course_id }) => { fetchStudents(); }, []); - - const fetchCourseMessages = async () => { - try { - const session = await fetchAuthSession(); - const token 
= session.tokens.idToken; - const { email } = await fetchUserAttributes(); - const response = await fetch( - `${ - import.meta.env.VITE_API_ENDPOINT - }instructor/course_messages?course_id=${encodeURIComponent( - course_id - )}&instructor_email=${encodeURIComponent(email)}`, - { - method: "GET", - headers: { - Authorization: token, - "Content-Type": "application/json", - }, - } - ); - if (response.ok) { - const data = await response.json(); - setAllMessageData(data); - downloadCSV(data); - } else { - console.error("Failed to fetch messages:", response.statusText); - } - } catch (error) { - console.error("Error fetching data:", error); - } - }; - - function downloadCSV(data) { - const headers = Object.keys(data[0]); - const csvRows = data.map((obj) => - headers - .map((header) => { - const value = obj[header]; - // Enclose the value in quotes and escape inner quotes - return `"${String(value).replace(/"/g, '""')}"`; - }) - .join(",") - ); - const csvContent = [headers.join(","), ...csvRows].join("\n"); - const blob = new Blob([csvContent], { type: "text/csv" }); - const link = document.createElement("a"); - link.href = URL.createObjectURL(blob); - link.download = "data.csv"; - document.body.appendChild(link); - link.click(); - document.body.removeChild(link); - } - + const handleGenerateAccessCode = async () => { try { + const session = await fetchAuthSession(); + var token = session.tokens.idToken; + const response = await fetch( `${ import.meta.env.VITE_API_ENDPOINT @@ -234,7 +189,7 @@ export const ViewStudents = ({ courseName, course_id }) => { row.name.toLowerCase().includes(searchQuery.toLowerCase()) ); const handleRowClick = (student) => { - navigate(`/course/${course_id}/student/${student.name}`, { + navigate(`/course/${courseName}/${course_id}/student/${student.name}`, { state: { course_id, student }, }); }; @@ -260,15 +215,6 @@ export const ViewStudents = ({ courseName, course_id }) => { > {courseTitleCase(courseName)} Students - diff --git 
a/package-lock.json b/package-lock.json new file mode 100644 index 0000000..022e0b4 --- /dev/null +++ b/package-lock.json @@ -0,0 +1,25 @@ +{ + "name": "AI-Learning-Assistant", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "dependencies": { + "postgres": "^3.4.5" + } + }, + "node_modules/postgres": { + "version": "3.4.5", + "resolved": "https://registry.npmjs.org/postgres/-/postgres-3.4.5.tgz", + "integrity": "sha512-cDWgoah1Gez9rN3H4165peY9qfpEo+SA61oQv65O3cRUE1pOEoJWwddwcqKE8XZYjbblOJlYDlLV4h67HrEVDg==", + "license": "Unlicense", + "engines": { + "node": ">=12" + }, + "funding": { + "type": "individual", + "url": "https://github.com/sponsors/porsager" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..33b6f8c --- /dev/null +++ b/package.json @@ -0,0 +1,5 @@ +{ + "dependencies": { + "postgres": "^3.4.5" + } +}