2 changes: 1 addition & 1 deletion comfy_api_nodes/apinode_utils.py
@@ -418,7 +418,7 @@ async def upload_video_to_comfyapi(
                 f"Video duration ({actual_duration:.2f}s) exceeds the maximum allowed ({max_duration}s)."
             )
     except Exception as e:
-        logging.error(f"Error getting video duration: {e}")
+        logging.error("Error getting video duration: %s", str(e))
         raise ValueError(f"Could not verify video duration from source: {e}") from e

     upload_mime_type = f"video/{container.value.lower()}"
55 changes: 32 additions & 23 deletions comfy_api_nodes/apis/client.py
@@ -356,10 +356,10 @@ async def request(
         if params:
             params = {k: v for k, v in params.items() if v is not None}  # aiohttp fails to serialize None values

-        logging.debug(f"[DEBUG] Request Headers: {request_headers}")
-        logging.debug(f"[DEBUG] Files: {files}")
-        logging.debug(f"[DEBUG] Params: {params}")
-        logging.debug(f"[DEBUG] Data: {data}")
+        logging.debug("[DEBUG] Request Headers: %s", request_headers)
+        logging.debug("[DEBUG] Files: %s", files)
+        logging.debug("[DEBUG] Params: %s", params)
+        logging.debug("[DEBUG] Data: %s", data)

         if content_type == "application/x-www-form-urlencoded":
             payload_args = self._create_urlencoded_form_data_args(data or {}, request_headers)
@@ -589,9 +589,9 @@ async def _handle_http_error(
                 error_message=f"HTTP Error {exc.status}",
             )

-        logging.debug(f"[DEBUG] API Error: {user_friendly} (Status: {status_code})")
+        logging.debug("[DEBUG] API Error: %s (Status: %s)", user_friendly, status_code)
         if response_content:
-            logging.debug(f"[DEBUG] Response content: {response_content}")
+            logging.debug("[DEBUG] Response content: %s", response_content)

         # Retry if eligible
         if status_code in self.retry_status_codes and retry_count < self.max_retries:
@@ -735,11 +735,9 @@ async def execute(self, client: Optional[ApiClient] = None) -> R:
                 if isinstance(v, Enum):
                     request_dict[k] = v.value

-            logging.debug(
-                f"[DEBUG] API Request: {self.endpoint.method.value} {self.endpoint.path}"
-            )
-            logging.debug(f"[DEBUG] Request Data: {json.dumps(request_dict, indent=2)}")
-            logging.debug(f"[DEBUG] Query Params: {self.endpoint.query_params}")
+            logging.debug("[DEBUG] API Request: %s %s", self.endpoint.method.value, self.endpoint.path)
+            logging.debug("[DEBUG] Request Data: %s", json.dumps(request_dict, indent=2))
+            logging.debug("[DEBUG] Query Params: %s", self.endpoint.query_params)

             response_json = await client.request(
                 self.endpoint.method.value,
@@ -754,11 +752,11 @@ async def execute(self, client: Optional[ApiClient] = None) -> R:
             logging.debug("=" * 50)
             logging.debug("[DEBUG] RESPONSE DETAILS:")
             logging.debug("[DEBUG] Status Code: 200 (Success)")
-            logging.debug(f"[DEBUG] Response Body: {json.dumps(response_json, indent=2)}")
+            logging.debug("[DEBUG] Response Body: %s", json.dumps(response_json, indent=2))
             logging.debug("=" * 50)

             parsed_response = self.endpoint.response_model.model_validate(response_json)
-            logging.debug(f"[DEBUG] Parsed Response: {parsed_response}")
+            logging.debug("[DEBUG] Parsed Response: %s", parsed_response)
             return parsed_response
         finally:
             if owns_client:
@@ -874,18 +872,21 @@ async def _poll_until_complete(self, client: ApiClient) -> R:
         status = TaskStatus.PENDING
         for poll_count in range(1, self.max_poll_attempts + 1):
             try:
-                logging.debug(f"[DEBUG] Polling attempt #{poll_count}")
+                logging.debug("[DEBUG] Polling attempt #%s", poll_count)

                 request_dict = (
                     None if self.request is None else self.request.model_dump(exclude_none=True)
                 )

                 if poll_count == 1:
                     logging.debug(
-                        f"[DEBUG] Poll Request: {self.poll_endpoint.method.value} {self.poll_endpoint.path}"
+                        "[DEBUG] Poll Request: %s %s",
+                        self.poll_endpoint.method.value,
+                        self.poll_endpoint.path,
                     )
                     logging.debug(
-                        f"[DEBUG] Poll Request Data: {json.dumps(request_dict, indent=2) if request_dict else 'None'}"
+                        "[DEBUG] Poll Request Data: %s",
+                        json.dumps(request_dict, indent=2) if request_dict else "None",
                     )

                 # Query task status
@@ -900,7 +901,7 @@ async def _poll_until_complete(self, client: ApiClient) -> R:

                 # Check if task is complete
                 status = self._check_task_status(response_obj)
-                logging.debug(f"[DEBUG] Task Status: {status}")
+                logging.debug("[DEBUG] Task Status: %s", status)

                 # If progress extractor is provided, extract progress
                 if self.progress_extractor:
@@ -914,15 +915,15 @@ async def _poll_until_complete(self, client: ApiClient) -> R:
                     result_url = self.result_url_extractor(response_obj)
                     if result_url:
                         message = f"Result URL: {result_url}"
-                        logging.debug(f"[DEBUG] {message}")
+                        logging.debug("[DEBUG] %s", message)
                         self._display_text_on_node(message)
                     self.final_response = response_obj
                     if self.progress_extractor:
                         progress.update(100)
                     return self.final_response
                 if status == TaskStatus.FAILED:
                     message = f"Task failed: {json.dumps(resp)}"
-                    logging.error(f"[DEBUG] {message}")
+                    logging.error("[DEBUG] %s", message)
                     raise Exception(message)
                 logging.debug("[DEBUG] Task still pending, continuing to poll...")
                 # Task pending – wait
@@ -936,7 +937,12 @@ async def _poll_until_complete(self, client: ApiClient) -> R:
                     raise Exception(
                         f"Polling aborted after {consecutive_errors} network errors: {str(e)}"
                     ) from e
-                logging.warning("Network error (%s/%s): %s", consecutive_errors, max_consecutive_errors, str(e))
+                logging.warning(
+                    "Network error (%s/%s): %s",
+                    consecutive_errors,
+                    max_consecutive_errors,
+                    str(e),
+                )
                 await asyncio.sleep(self.poll_interval)
             except Exception as e:
                 # For other errors, increment count and potentially abort
@@ -946,10 +952,13 @@ async def _poll_until_complete(self, client: ApiClient) -> R:
                         f"Polling aborted after {consecutive_errors} consecutive errors: {str(e)}"
                     ) from e

-                logging.error(f"[DEBUG] Polling error: {str(e)}")
+                logging.error("[DEBUG] Polling error: %s", str(e))
                 logging.warning(
-                    f"Error during polling (attempt {poll_count}/{self.max_poll_attempts}): {str(e)}. "
-                    f"Will retry in {self.poll_interval} seconds."
+                    "Error during polling (attempt %s/%s): %s. Will retry in %s seconds.",
+                    poll_count,
+                    self.max_poll_attempts,
+                    str(e),
+                    self.poll_interval,
                )
                 await asyncio.sleep(self.poll_interval)
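Several of the hunks above touch the error paths of _poll_until_complete. For orientation, a minimal sketch of that loop's shape with the logging calls in their lazy form; the function name, defaults, and status strings here are illustrative stand-ins, not the real client API:

import asyncio
import logging

async def poll_until_complete(fetch_status, max_attempts=120, poll_interval=5.0, max_consecutive_errors=5):
    # Illustrative stand-in for the polling loop: bounded attempts,
    # a cap on consecutive errors, and a fixed sleep between polls.
    consecutive_errors = 0
    for poll_count in range(1, max_attempts + 1):
        try:
            status = await fetch_status()
            logging.debug("[DEBUG] Task Status: %s", status)
            if status == "completed":
                return status
            if status == "failed":
                raise RuntimeError("Task failed")
            consecutive_errors = 0  # a successful poll resets the error streak
        except RuntimeError:
            raise  # terminal failure, do not retry
        except Exception as e:
            consecutive_errors += 1
            if consecutive_errors >= max_consecutive_errors:
                raise RuntimeError(f"Polling aborted after {consecutive_errors} consecutive errors: {e}") from e
            logging.warning(
                "Error during polling (attempt %s/%s): %s. Will retry in %s seconds.",
                poll_count, max_attempts, e, poll_interval,
            )
        await asyncio.sleep(poll_interval)
    raise TimeoutError(f"Polling did not complete within {max_attempts} attempts")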
6 changes: 3 additions & 3 deletions comfy_api_nodes/apis/request_logger.py
@@ -21,7 +21,7 @@ def get_log_directory():
     try:
         os.makedirs(log_dir, exist_ok=True)
     except Exception as e:
-        logger.error(f"Error creating API log directory {log_dir}: {e}")
+        logger.error("Error creating API log directory %s: %s", log_dir, str(e))
         # Fallback to base temp directory if sub-directory creation fails
         return base_temp_dir
     return log_dir
@@ -122,9 +122,9 @@ def log_request_response(
     try:
         with open(filepath, "w", encoding="utf-8") as f:
             f.write("\n".join(log_content))
-        logger.debug(f"API log saved to: {filepath}")
+        logger.debug("API log saved to: %s", filepath)
     except Exception as e:
-        logger.error(f"Error writing API log to {filepath}: {e}")
+        logger.error("Error writing API log to %s: %s", filepath, str(e))


 if __name__ == '__main__':
4 changes: 2 additions & 2 deletions comfy_api_nodes/nodes_kling.py
@@ -296,15 +296,15 @@ def validate_video_result_response(response) -> None:
     """Validates that the Kling task result contains a video."""
     if not is_valid_video_response(response):
         error_msg = f"Kling task {response.data.task_id} succeeded but no video data found in response."
-        logging.error(f"Error: {error_msg}.\nResponse: {response}")
+        logging.error("Error: %s.\nResponse: %s", error_msg, response)
         raise Exception(error_msg)


 def validate_image_result_response(response) -> None:
     """Validates that the Kling task result contains an image."""
     if not is_valid_image_response(response):
         error_msg = f"Kling task {response.data.task_id} succeeded but no image data found in response."
-        logging.error(f"Error: {error_msg}.\nResponse: {response}")
+        logging.error("Error: %s.\nResponse: %s", error_msg, response)
         raise Exception(error_msg)

2 changes: 1 addition & 1 deletion comfy_api_nodes/nodes_minimax.py
@@ -500,7 +500,7 @@ async def execute(
             raise Exception(
                 f"No video was found in the response. Full response: {file_result.model_dump()}"
             )
-        logging.info(f"Generated video URL: {file_url}")
+        logging.info("Generated video URL: %s", file_url)
         if cls.hidden.unique_id:
             if hasattr(file_result.file, "backup_download_url"):
                 message = f"Result URL: {file_url}\nBackup URL: {file_result.file.backup_download_url}"
14 changes: 5 additions & 9 deletions comfy_api_nodes/nodes_moonvalley.py
@@ -237,7 +237,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
     audio_stream = None

     for stream in input_container.streams:
-        logging.info(f"Found stream: type={stream.type}, class={type(stream)}")
+        logging.info("Found stream: type=%s, class=%s", stream.type, type(stream))
         if isinstance(stream, av.VideoStream):
             # Create output video stream with same parameters
             video_stream = output_container.add_stream(
@@ -247,7 +247,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
             video_stream.height = stream.height
             video_stream.pix_fmt = "yuv420p"
             logging.info(
-                f"Added video stream: {stream.width}x{stream.height} @ {stream.average_rate}fps"
+                "Added video stream: %sx%s @ %sfps", stream.width, stream.height, stream.average_rate
             )
         elif isinstance(stream, av.AudioStream):
             # Create output audio stream with same parameters
@@ -256,9 +256,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
             )
             audio_stream.sample_rate = stream.sample_rate
             audio_stream.layout = stream.layout
-            logging.info(
-                f"Added audio stream: {stream.sample_rate}Hz, {stream.channels} channels"
-            )
+            logging.info("Added audio stream: %sHz, %s channels", stream.sample_rate, stream.channels)

     # Calculate target frame count that's divisible by 16
     fps = input_container.streams.video[0].average_rate
@@ -288,9 +286,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
         for packet in video_stream.encode():
             output_container.mux(packet)

-        logging.info(
-            f"Encoded {frame_count} video frames (target: {target_frames})"
-        )
+        logging.info("Encoded %s video frames (target: %s)", frame_count, target_frames)

     # Decode and re-encode audio frames
     if audio_stream:
@@ -308,7 +304,7 @@ def trim_video(video: VideoInput, duration_sec: float) -> VideoInput:
         for packet in audio_stream.encode():
             output_container.mux(packet)

-        logging.info(f"Encoded {audio_frame_count} audio frames")
+        logging.info("Encoded %s audio frames", audio_frame_count)

     # Close containers
     output_container.close()
10 changes: 5 additions & 5 deletions comfy_api_nodes/nodes_rodin.py
@@ -172,16 +172,16 @@ async def create_generate_task(
     logging.info("[ Rodin3D API - Submit Jobs ] Submit Generate Task Success!")
     subscription_key = response.jobs.subscription_key
     task_uuid = response.uuid
-    logging.info(f"[ Rodin3D API - Submit Jobs ] UUID: {task_uuid}")
+    logging.info("[ Rodin3D API - Submit Jobs ] UUID: %s", task_uuid)
     return task_uuid, subscription_key


 def check_rodin_status(response: Rodin3DCheckStatusResponse) -> str:
     all_done = all(job.status == JobStatus.Done for job in response.jobs)
     status_list = [str(job.status) for job in response.jobs]
-    logging.info(f"[ Rodin3D API - CheckStatus ] Generate Status: {status_list}")
+    logging.info("[ Rodin3D API - CheckStatus ] Generate Status: %s", status_list)
     if any(job.status == JobStatus.Failed for job in response.jobs):
-        logging.error(f"[ Rodin3D API - CheckStatus ] Generate Failed: {status_list}, Please try again.")
+        logging.error("[ Rodin3D API - CheckStatus ] Generate Failed: %s, Please try again.", status_list)
         raise Exception("[ Rodin3D API ] Generate Failed, Please Try again.")
     if all_done:
         return "DONE"
@@ -235,7 +235,7 @@ async def download_files(url_list, task_uuid):
         file_path = os.path.join(save_path, file_name)
         if file_path.endswith(".glb"):
             model_file_path = file_path
-        logging.info(f"[ Rodin3D API - download_files ] Downloading file: {file_path}")
+        logging.info("[ Rodin3D API - download_files ] Downloading file: %s", file_path)
         max_retries = 5
         for attempt in range(max_retries):
             try:
@@ -246,7 +246,7 @@ async def download_files(url_list, task_uuid):
                         f.write(chunk)
                 break
             except Exception as e:
-                logging.info(f"[ Rodin3D API - download_files ] Error downloading {file_path}:{e}")
+                logging.info("[ Rodin3D API - download_files ] Error downloading %s:%s", file_path, str(e))
                 if attempt < max_retries - 1:
                     logging.info("Retrying...")
                     await asyncio.sleep(2)
2 changes: 1 addition & 1 deletion comfy_api_nodes/nodes_veo2.py
@@ -215,7 +215,7 @@ async def execute(
         initial_response = await initial_operation.execute()
         operation_name = initial_response.name

-        logging.info(f"Veo generation started with operation name: {operation_name}")
+        logging.info("Veo generation started with operation name: %s", operation_name)

         # Define status extractor function
         def status_extractor(response):
1 change: 0 additions & 1 deletion pyproject.toml
@@ -61,7 +61,6 @@ messages_control.disable = [
     # next warnings should be fixed in future
     "bad-classmethod-argument", # Class method should have 'cls' as first argument
     "wrong-import-order", # Standard imports should be placed before third party imports
-    "logging-fstring-interpolation", # Use lazy % formatting in logging functions
     "ungrouped-imports",
     "unnecessary-pass",
     "unnecessary-lambda-assignment",
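The pyproject.toml change removes the suppression of pylint's logging-fstring-interpolation check (W1203), so the lint is active again; the rest of the diff is what satisfies it. For reference, a minimal sketch of the pattern the diff standardizes on (logger name and payload here are illustrative):

import json
import logging

logger = logging.getLogger("comfy_api_nodes.example")  # illustrative name
payload = {"task_id": "abc123"}

# Eager (flagged by W1203): the f-string is rendered at the call site,
# even when the DEBUG level is disabled and the record is dropped.
logger.debug(f"Request Data: {json.dumps(payload, indent=2)}")

# Lazy (the pattern adopted here): the format arguments are stored on the
# LogRecord and interpolated only if a handler actually emits the record.
# Note that argument expressions such as json.dumps() still run eagerly;
# only the %-string interpolation is deferred.
logger.debug("Request Data: %s", json.dumps(payload, indent=2))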