
I am new to Azure Function Apps.

I have Python code that I want to run whenever the HTTP trigger is called.

I have created a new project; what is the correct way for "__init__.py" to call my code? (My attempt is marked in the listing below, and after it I have sketched what I think the call should look like.)

Here is "__init__.py":

import logging
import azure.functions as func
import UploadToGCS


def main(req: func.HttpRequest) -> func.HttpResponse:
    logging.info('Python HTTP trigger function processed a request.')

    name = req.params.get('name')
    if not name:
        try:
            req_body = req.get_json()
        except ValueError:
            pass
        else:
            name = req_body.get('name')

    if name:
        UploadToGCS(UploadToGCS.upload_files)     # <--- I called it here
        return func.HttpResponse(f"Hello, {name}. This HTTP triggered function executed successfully.")
    else:
        return func.HttpResponse(
             "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.",
             status_code=200
        )
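
What I think I need is something like the sketch below (untested): calling the function defined inside the module, rather than calling the module itself. The "." search root is only a placeholder, since the C:/ path from my local script will not exist in Azure:

    import UploadToGCS

    # Call the function inside the module and pass it the config file path.
    # "." is a placeholder for wherever gcs_config.json is deployed with the function app.
    config_file = UploadToGCS.find("gcs_config.json", ".")
    UploadToGCS.upload_files(config_file)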

At the moment I get a "401 error" page.

Can you suggest how this should be done?

Here is my Python code (the details used for uploading files to the Google Cloud Storage bucket are read from config_file = find("gcs_config.json", "C:/")):

from google.cloud import storage
import os
import glob
import json

# Finding path to config file that is called "gcs_config.json" in directory C:/
def find(name, path):
    for root, dirs, files in os.walk(path):
        if name in files:
            return os.path.join(root, name)

def upload_files(config_file):
    # Reading 3 Parameters for upload from JSON file
    with open(config_file, "r") as file:
        contents = json.loads(file.read())
        print(contents)

    # Setting up login credentials
    os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = contents['login_credentials']
    # The ID of GCS bucket
    bucket_name = contents['bucket_name']
    # Setting path to files
    LOCAL_PATH = contents['folder_from']

    for source_file_name in glob.glob(LOCAL_PATH + '/**'):

        # For multiple files upload
        # Setting destination folder according to file name
        if os.path.isfile(source_file_name):
            partitioned_file_name = os.path.split(source_file_name)[-1].partition("-")
            file_type_name = partitioned_file_name[0]

            # Setting folder where files will be uploaded
            destination_blob_name = file_type_name + "/" + os.path.split(source_file_name)[-1]

            # Setting up required variables for GCS 
            storage_client = storage.Client()
            bucket = storage_client.bucket(bucket_name)
            blob = bucket.blob(destination_blob_name)

            # Running upload and printing confirmation message
            blob.upload_from_filename(source_file_name)
            print("File from {} uploaded to {} in bucket {}.".format(
                source_file_name, destination_blob_name, bucket_name
            ))

config_file = find("gcs_config.json", "C:/")

upload_files(config_file)
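
For reference, gcs_config.json only holds the three values the script reads. The snippet below shows the expected keys with placeholder values (my real file points at my service-account key file, my bucket and the local folder to upload from):

    import json

    # Placeholder values only; the real file contains the actual paths and bucket name.
    config = {
        "login_credentials": "C:/path/to/service_account_key.json",
        "bucket_name": "my-bucket-name",
        "folder_from": "C:/path/to/files",
    }

    with open("gcs_config.json", "w") as file:
        json.dump(config, file, indent=4)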

Kind regards, Anna
