Skip to content

Commit

Permalink
fix: add more logging
Browse files Browse the repository at this point in the history
  • Loading branch information
rainmanjam committed Aug 26, 2024
1 parent ef9daad commit b1009fa
Show file tree
Hide file tree
Showing 2 changed files with 75 additions and 12 deletions.
49 changes: 49 additions & 0 deletions .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
name: Deploy to Google Cloud Run

on:
  push:
    branches:
      - main

jobs:
  build:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      # setup-gcloud v1 removed the `service_account_key` input; credentials
      # must be established first via the dedicated auth action. Without this
      # step the later gcloud/docker commands fail with missing credentials.
      - name: Authenticate to Google Cloud
        uses: google-github-actions/auth@v1
        with:
          credentials_json: ${{ secrets.GCP_SA_KEY }}

      - name: Set up Google Cloud SDK
        uses: google-github-actions/setup-gcloud@v1
        with:
          version: 'latest'
          project_id: ${{ secrets.GCP_PROJECT_ID }}

      # Bumps the semver tag on every push to main (patch by default) and
      # exposes it as steps.tag_version.outputs.new_tag for the image tag.
      - name: Increment version and tag release
        id: tag_version
        uses: anothrnick/[email protected]
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          default_bump: patch
          tag_prefix: ''

      - name: Get the version
        run: echo "Release version: ${{ steps.tag_version.outputs.new_tag }}"

      # Registers gcloud as a Docker credential helper for gcr.io pushes.
      - name: Authenticate Docker with Google Cloud
        run: gcloud auth configure-docker

      - name: Build Docker Image
        run: docker build -t gcr.io/${{ secrets.GCP_PROJECT_ID }}/my-app-image:${{ steps.tag_version.outputs.new_tag }} .

      - name: Push Docker Image to Google Container Registry
        run: docker push gcr.io/${{ secrets.GCP_PROJECT_ID }}/my-app-image:${{ steps.tag_version.outputs.new_tag }}

      - name: Deploy to Google Cloud Run
        run: |
          gcloud run deploy my-app-service \
            --image gcr.io/${{ secrets.GCP_PROJECT_ID }}/my-app-image:${{ steps.tag_version.outputs.new_tag }} \
            --region ${{ secrets.GCP_REGION }} \
            --platform managed \
            --allow-unauthenticated
38 changes: 26 additions & 12 deletions routes/convert.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,8 @@ def convert_media_to_mp3():

job_id = str(uuid.uuid4())
output_filename = f"{job_id}.mp3"
logger.info(f"Job {job_id}: Starting conversion for {media_url}")
logger.info(f"Job {job_id}: Starting conversion for media URL: {media_url}")
logger.info(f"Job {job_id}: Output filename set to: {output_filename}")

def process_and_notify():
logger.info(f"Job {job_id}: Entering process_and_notify function.")
Expand All @@ -42,71 +43,84 @@ def process_and_notify():
output_path = process_conversion(media_url, job_id)
logger.info(f"Job {job_id}: Conversion completed. Output file: {output_path}")

# Check if the output file exists after conversion
if not os.path.exists(output_path):
# Check if the output file exists after conversion (only if local)
if STORAGE_PATH == 'local' and not os.path.exists(output_path):
logger.error(f"Job {job_id}: Output file not found at: {output_path}")
raise Exception(f"Output file not found after conversion: {output_path}")

uploaded_file_url = None

# Handle different storage methods
if STORAGE_PATH == 'gcp':
logger.info(f"Job {job_id}: STORAGE_PATH is set to GCP. File already in Google Cloud Storage.")
logger.info(f"Job {job_id}: STORAGE_PATH is set to GCP. Using existing Google Cloud Storage file URL.")
uploaded_file_url = output_path
elif STORAGE_PATH == 'drive':
if GDRIVE_USER:
logger.info(f"Job {job_id}: Uploading to Google Drive for user '{GDRIVE_USER}'...")
# Log the file path before uploading
logger.info(f"Job {job_id}: Uploading to Google Drive for user: {GDRIVE_USER}")
logger.info(f"Job {job_id}: Uploading file from path: {output_path}")
uploaded_file_url = upload_to_gdrive(output_path, output_filename)
logger.info(f"Job {job_id}: Uploaded file to Google Drive. URL: {uploaded_file_url}")
else:
logger.error(f"Job {job_id}: GDRIVE_USER environment variable is not set.")
raise Exception("GDRIVE_USER is not set while STORAGE_PATH is set to Drive")
elif STORAGE_PATH == 'local':
logger.info(f"Job {job_id}: STORAGE_PATH is set to local. Moving file to local storage...")
logger.info(f"Job {job_id}: Moving file to local storage.")
uploaded_file_url = move_to_local_storage(output_path, output_filename)
logger.info(f"Job {job_id}: File moved to local storage. Path: {uploaded_file_url}")
else:
logger.error(f"Job {job_id}: Invalid STORAGE_PATH value: {STORAGE_PATH}")
raise Exception(f"Invalid STORAGE_PATH: {STORAGE_PATH}")

if not uploaded_file_url:
logger.error(f"Job {job_id}: Failed to upload/move the output file.")
raise Exception(f"Failed to upload/move the output file {output_path}")

logger.info(f"Job {job_id}: File uploaded/moved successfully. URL/Path: {uploaded_file_url}")
logger.info(f"Job {job_id}: File uploaded/moved successfully. Final URL/Path: {uploaded_file_url}")

# Now it's safe to remove the local file
os.remove(output_path)
logger.info(f"Job {job_id}: Removed local file {output_path}")
# Now it's safe to remove the local file if it's not already uploaded
if STORAGE_PATH == 'local':
os.remove(output_path)
logger.info(f"Job {job_id}: Removed local file {output_path}")

# Send success webhook
if webhook_url:
logger.info(f"Job {job_id}: Sending success webhook to {webhook_url}")
send_webhook(webhook_url, {
"endpoint": "/media-to-mp3",
"job_id": job_id,
"response": uploaded_file_url,
"code": 200,
"message": "success"
})
logger.info(f"Job {job_id}: Success webhook sent.")

except Exception as e:
logger.error(f"Job {job_id}: Error during processing - {e}")

# Send failure webhook
if webhook_url:
logger.info(f"Job {job_id}: Sending failure webhook to {webhook_url}")
send_webhook(webhook_url, {
"endpoint": "/media-to-mp3",
"job_id": job_id,
"error": str(e),
"code": 500,
"message": "failed"
})
logger.info(f"Job {job_id}: Failure webhook sent.")
finally:
logger.info(f"Job {job_id}: Exiting process_and_notify function.")

if webhook_url:
logger.info(f"Job {job_id}: Webhook URL provided. Starting processing in a separate thread.")
threading.Thread(target=process_and_notify).start()
return jsonify({"job_id": job_id, "filename": output_filename}), 202
else:
try:
logger.info(f"Job {job_id}: No webhook URL provided. Starting synchronous processing.")
output_path = process_conversion(media_url, job_id)
logger.info(f"Job {job_id}: Returning successful response. Output file: {output_path}")
return jsonify({"job_id": job_id, "filename": output_filename}), 200
except Exception as e:
logger.error(f"Job {job_id}: Error during processing - {e}")
return jsonify({"error": str(e)}), 500
return jsonify({"error": str(e)}), 500

0 comments on commit b1009fa

Please sign in to comment.