
FastAPI and Google cloud Error 504 when uploading media - Stack Overflow


I've been facing this error on GCP for several days: I get a 504 error in the cloud, while on my local machine everything works perfectly.

I'm using FastAPI for the backend; here is the endpoint:

@router.post("/create-pharmacy-user-form", status_code=status.HTTP_201_CREATED)
async def create_form_user(
        db: db_dependency,  # Database dependency
        user_email: str = Form(...),
        user_password: str = Form(...),
        user_first_name: str = Form(...),
        user_last_name: str = Form(...),
        user_phone_number: str = Form(...),
        user_city: str = Form(...),
        user_country: str = Form(...),
        local_name: str = Form(...),
        user_newsletter_subscription: bool = Form(...),
        local_image: UploadFile = File(...),
        local_ice: UploadFile = File(...),
        local_patent: UploadFile = File(...)
    ):
    user_interface = UserInterface(db)
    
    try:
        # Collect the form fields and uploaded files into a dict
        print("Endpoint called")
        user_data = {
            "user_email": user_email,
            "user_password": user_password,
            "user_first_name": user_first_name,
            "user_last_name": user_last_name,
            "user_phone_number": user_phone_number,
            "user_city": user_city,
            "user_country": user_country,
            "local_name": local_name,
            "local_image": local_image,
            "local_ice": local_ice,
            "local_patent": local_patent,
            "user_newsletter_subscription": user_newsletter_subscription,
        }
        db_user = user_interface.create_pharmacy_user(user_data)
        print("Uploading image")
        # Call the image upload function (ensure this is atomic or does not affect DB consistency)
        try:
            post_images(db_user["local_id"], user_data["local_image"], "pharmacy_images")
            print("Image uploaded")

            # Upload documents
            post_document(db_user["user_id"], user_data["local_ice"], "pharmacy_ices")
            post_document(db_user["user_id"], user_data["local_patent"], "pharmacy_patents")
            print("Documents uploaded")
        except Exception as e:
            print(e)

        return api_response(
            message="User created successfully",
            data=db_user,
            status_code=201
        )
    except Exception as e:
        return api_response(
            message="User creation failed: " + str(e),
            status_code=500
        )

and here are the functions:

def post_images(image_id: uuid.UUID, image_file: UploadFile, path: str):
    bucket_name = settings.BUCKET_NAME
    service_account_json = settings.SERVICE_ACCOUNT_JSON

    try:
        # Open image directly from UploadFile's file object
        print("Opening image file")
        # Initialize GCS client
        client = storage.Client.from_service_account_json(service_account_json)
        bucket = client.bucket(bucket_name)

        # Generate a unique blob name
        blob_name = f"{path}/{image_id}.webp"
        blob = bucket.blob(blob_name)

        # Upload image to GCS
        blob.upload_from_file(image_file.file, content_type=image_file.content_type)
        print("Image uploaded successfully")
        print(blob.public_url)
        return {"image_id": str(image_id), "url": blob.public_url}

    except Exception as e:
        print("Error uploading image:", e)
        raise HTTPException(status_code=500, detail="Image upload failed.")


def post_document(document_id: uuid.UUID, document_file: UploadFile, path: str):
    bucket_name = settings.BUCKET_NAME
    service_account_json = settings.SERVICE_ACCOUNT_JSON

    try:
        print("Reading Document file")

        # Guess the mime type of the file
        mime_type = mimetypes.guess_type(document_file.filename)[0] or "application/octet-stream"
        extension = mimetypes.guess_extension(mime_type) or ".bin"

        # Generate a unique file name with the correct extension
        blob_name = f"{path}/{document_id}{extension}"

        # Initialize GCS client
        client = storage.Client.from_service_account_json(service_account_json)
        bucket = client.bucket(bucket_name)
        blob = bucket.blob(blob_name)

        # Upload the file to GCS (this call blocks until the upload completes)
        blob.upload_from_file(document_file.file, content_type=mime_type)
        print("Document uploaded")
        print(blob.public_url)

        return True

    except Exception as e:
        print("Error uploading document:", e)
        raise HTTPException(status_code=500, detail="Document upload failed.")

The error I'm getting on GCP Cloud Run is: 504 upstream request timeout.

I'd really appreciate your help, since I couldn't resolve the issue myself. I've tried multiple variations of the code, but none has worked.


Asked by RedBlue197 · edited by Chris
  • You might find this answer and this answer helpful – Chris
  • You might need to change the timeout value, e.g., blob.upload_from_file(file.file, timeout=180) (see the sketch below). Please have a look at the links above for more details and options. – Chris
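In context, Chris's suggestion would look something like the sketch below, applied to the question's post_images function; the 180-second value is illustrative, not a recommendation:

    # Pass an explicit per-request timeout (in seconds) so the
    # google-cloud-storage client waits longer before giving up on a slow upload.
    blob.upload_from_file(
        image_file.file,
        content_type=image_file.content_type,
        timeout=180,  # illustrative value; tune to your expected file sizes
    )

The same timeout parameter applies to the uploads in post_document.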

1 Answer


The error you've encountered indicates a timeout of the upstream container: the request is taking too long to process, so the proxy in front of your service gives up. One possible cause is large file uploads. It is also possible that Cloud Run does not have enough memory and CPU for the file processing, which would explain why it works well locally but not in the cloud; you might need to configure the service and raise its resource limits. If large uploads are the culprit, streaming them in fixed-size parts keeps memory usage flat, as sketched below.
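A minimal sketch, assuming the question's post_images function and its google-cloud-storage client; the 5 MB part size is an assumption (it must be a multiple of 256 KB):

    # Setting chunk_size makes the client perform a resumable upload in
    # fixed-size parts instead of buffering the whole file, keeping memory
    # usage flat regardless of file size.
    blob = bucket.blob(blob_name, chunk_size=5 * 1024 * 1024)  # 5 MB parts
    blob.upload_from_file(image_file.file, content_type=image_file.content_type)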

As mentioned by Chris, it might also help to change the default timeout value. Aside from that, you can implement logging and tracing so you know exactly which step is taking too much time; you can read this documentation about logging and viewing logs in Cloud Run.
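A minimal sketch of such timing, again assuming the question's post_images function; on Cloud Run, anything printed to stdout is collected by Cloud Logging, so the measurement shows up in the request logs:

    import time

    start = time.monotonic()
    blob.upload_from_file(image_file.file, content_type=image_file.content_type)
    # The elapsed time tells you whether the upload itself, or some other step
    # in the request (e.g. the database call), is what exceeds the timeout.
    print(f"GCS upload of {blob.name} took {time.monotonic() - start:.1f}s")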
