0

下面是两种方法,它们是我在 AWS 中的状态机的一部分。

首先,使用 S3 SELECT 从 csv 文件中获取数据的方法。

/// <summary>
/// Runs an S3 Select query ("select * from S3Object") against a CSV object and
/// returns the resulting event stream. The caller owns, and must dispose, the stream.
/// </summary>
/// <param name="s3Object">The S3 object (bucket + key) to query.</param>
/// <param name="s3Client">Client used to issue the SelectObjectContent request.</param>
/// <param name="definition">Source definition; currently unused in this method — TODO confirm whether per-definition settings (delimiter, header handling) should drive the request instead of the hard-coded values below.</param>
/// <returns>The payload event stream of the SelectObjectContent response.</returns>
private static async Task<ISelectObjectContentEventStream> GetSelectObjectContentEventStream(S3Object s3Object,
    AmazonS3Client s3Client, ObjectDefinition definition)
{
    // Select every row of the comma-delimited CSV; the header row is present but
    // not used for column names (FileHeaderInfo.Ignore), and each record comes
    // back serialized as JSON.
    var selectRequest = new SelectObjectContentRequest
    {
        Bucket = s3Object.BucketName,
        Key = s3Object.Key,
        ExpressionType = ExpressionType.SQL,
        Expression = "select * from S3Object",
        InputSerialization = new InputSerialization
        {
            CSV = new CSVInput
            {
                FileHeaderInfo = FileHeaderInfo.Ignore,
                FieldDelimiter = ",",
            }
        },
        OutputSerialization = new OutputSerialization
        {
            JSON = new JSONOutput()
        }
    };

    var selectResponse = await s3Client.SelectObjectContentAsync(selectRequest);
    return selectResponse.Payload;
}

现在,调用它的方法:

/// <summary>
/// For each object definition in the consumer's target list: lists the matching
/// objects in the definition's bucket, runs S3 Select over every key that contains
/// the configured prefix and extension, and logs the returned records.
/// </summary>
/// <param name="staticDataConsumer">The consumer definition driving which buckets/files are read; returned unchanged.</param>
/// <param name="context">Lambda context used for logging.</param>
/// <returns>The same <paramref name="staticDataConsumer"/> instance, for state-machine chaining.</returns>
public async Task<StaticDataConsumerDefinition> ConvertFromSourceS3Async(StaticDataConsumerDefinition staticDataConsumer, ILambdaContext context)
{
    using (var s3Client = new AmazonS3Client())
    {
        foreach (ObjectDefinition definition in staticDataConsumer.TargetList.Objects)
        {
            // NOTE(review): FilePath is used as the bucket name — confirm that is intended.
            var listRequest = new ListObjectsV2Request
            {
                BucketName = definition.FilePath,
                MaxKeys = 1000
            };

            // BUG FIX: was ".Result" ("force synchronous"), which blocks the thread inside
            // an async method and risks deadlock/thread-pool starvation. Stay async all the way.
            ListObjectsV2Response listResponse = await s3Client.ListObjectsV2Async(listRequest);

            if (definition.LogActivity)
            {
                // BUG FIX: previously logged HttpStatusCode twice (value and ToString() are identical).
                context.Logger.LogLine($"Response from S3 Request: {listResponse.HttpStatusCode}");
            }

            foreach (var entity in listResponse.S3Objects.Where(n => n.Key.Contains(definition.FilePrefix)))
            {
                // Contains (not EndsWith) is kept to preserve existing matching behavior.
                if (entity.Key.Contains(definition.FileExtension))
                {
                    context.Logger.LogLine($"entity {entity.Key}");

                    using (var s3Events = await GetSelectObjectContentEventStream(entity, s3Client, definition))
                    {
                        foreach (var ev in s3Events)
                        {
                            context.Logger.LogLine($"Received {ev.GetType().Name}!");
                            if (ev is RecordsEvent records)
                            {
                                context.Logger.LogLine("The contents of the Records Event is...");
                                // S3 Select emits UTF-8; make the decoder explicit instead of
                                // relying on StreamReader's default.
                                // NOTE(review): output interleaved with \u0000 means the SOURCE
                                // CSV is UTF-16 — S3 Select only supports UTF-8 input, so the
                                // file itself must be re-saved as UTF-8; no reader encoding can
                                // repair bytes S3 Select has already mis-split.
                                using (var reader = new StreamReader(records.Payload, System.Text.Encoding.UTF8))
                                {
                                    context.Logger.Log(reader.ReadToEnd());
                                }
                            }
                        }
                    }
                }
            }
        }
    }

    context.Logger.Log($"Passing ConvertSourceData {ConvertToIndentedJson(staticDataConsumer)}");
    return staticDataConsumer;
}

但是,我从 CloudWatch 日志中获取的数据是垃圾 - 它看起来有点像 ASCII 字符/或编码字符?不是我所期待的!有什么想法吗?

{ "_1": "\u00001\u00000\u00005\u0000", "_2": "\u0000K\u0000a\u0000t\u0000e\u0000\u0000F\u0000a\u0000r\u0000d\u0000e\u00000l\u0000l\u u0000" }

{ "_1": "\u00001\u00000\u00006\u0000", "_2": "\u0000S\u0000h\u0000o\u0000n\u0000a\u0000\u0000M\u0000a\u0000r\u0000i\u00000n\u0\000\u u0000" }

{ "_1": "\u00001\u00000\u00008\u0000", "_2": "\u0000S\u0000h\u0000o\u0000n\u0000a\u0000 \u0000M\u0000a\u0000r\u0000i\u00000n\u0\000o\u u0000" }

{ "_1": "\u00001\u00001\u00001\u0000", "_2": "\u0000S\u0000h\u0000o\u0000n\u0000a\u0000 \u0000M\u0000a\u0000r\u0000i\u00000n\u0\000o\u u0000" }

{ "_1": "\u00001\u00001\u00002\u0000", "_2": "\u0000L\u0000i\u0000n\u0000a\u0000\u0000H\u0000a\u0000n\u0000n\u0000a\u00000w\u0\000\u u0000" }

{ "_1": "\u00001\u00001\u00003\u0000", "_2": "\u0000J\u0000e\u0000n\u0000n\u0000i\u0000f\u0000e\u0000r\u0000\u0000H\u00000a\u00000\u r\u0000" }

{ "_1": "\u00001\u00001\u00004\u0000", "_2": "\u0000S\u0000t\u0000a\u0000n\u0000 \u0000K\u0000a\u0000k\u0000k\u0000a\u00000s\u00000i\u\ r\u0000" }

{ "_1": "\u00001\u00001\u00006\u0000", "_2": "\u0000S\u0000t\u0000a\u0000n\u0000 \u0000K\u0000a\u0000k\u0000k\u0000a\u00000s\u00000s\u\ r\u0000" }

{ "_1": "\u00001\u00001\u00008\u0000", "_2": "\u0000S\u0000t\u0000a\u0000n\u0000 \u0000K\u0000a\u0000k\u0000k\u0000a\u00000s\u00000i\u\ r\u0000" }

{ "_1": "\u00001\u00001\u00009\u0000", "_2": "\u0000S\u0000h\u0000o\u0000n\u0000a\u0000 \u0000M\u0000a\u0000r\u0000i\u00000n\u0\000o\u u0000" }

{ "_1": "\u00001\u00002\u00007\u0000", "_2": "\u0000A\u0000y\u0000d\u0000i\u0000n\u0000\u0000T\u0000e\u0000b\u0000y\u00000a\u00000a\u\u u0000n\u0000\r\u0000" }

{ "_1": "\u00001\u00002\u00008\u0000", "_2": "\u0000C\u0000a\u0000m\u0000e\u0000r\u0000o\u0000n\u0000\u0000P\u0000a\u00000l\u00000r\u u0000\r\u0000" }

{ "_1": "\u00001\u00009\u00007\u0000", "_2": "\u0000S\u0000h\u0000a\u0000r\u0000o\u0000n\u0000\u0000B\u0000e\u0000r\u00000g\u00000\u\u r\u0000" }

{ "_1": "\u00002\u00000\u00001\u0000", "_2": "\u0000K\u0000a\u0000r\u0000e\u0000e\u0000n\u0000a\u0000\u0000D\u0000a\u00000v\u00000e\u\u u0000\r\u0000" }

{ "_1": "\u00002\u00000\u00002\u0000", "_2": "\u0000L\u0000i\u0000n\u0000a\u0000 \u0000H\u0000a\u0000n\u0000n\u0000a\u00000w\u0\000\u u0000" }

{ "_1": "\u00002\u00000\u00003\u0000", "_2": "\u0000S\u0000a\u0000m\u0000\u0000V\u0000i\u0000t\u0000a\u0000n\u0000z\u00000a\u000\r\" }

{ "_1": "\u00002\u00000\u00006\u0000", "_2": "\u0000K\u0000a\u0000r\u0000e\u0000e\u0000n\u0000a\u0000\u0000D\u0000a\u00000v\u00000e\u u0000\r\u0000" }

{ "_1": "\u00002\u00000\u00008\u0000", "_2": "\u0000K\u0000a\u0000r\u0000e\u0000e\u0000n\u0000a\u0000\u0000D\u0000a\u00000v\u00000e\u u0000\r\u0000" }

{ "_1": "\u00002\u00001\u00004\u0000", "_2": "\u0000K\u0000a\u0000r\u0000e\u0000e\u0000n\u0000a\u0000\u0000D\u0000a\u00000v\u00000e\u\u u0000\r\u0000" }

{ "_1": "\u00002\u00001\u00007\u0000", "_2": "\u0000K\u0000y\u0000l\u0000i\u0000e\u0000\u0000B\u0000r\u0000a\u0000d\u000000\u00000e\u r\u0000" }

{ "_1": "\u00002\u00001\u00008\u0000", "_2": "\u0000K\u0000a\u0000t\u0000e\u0000 \u0000F\u0000a\u0000r\u0000d\u0000e\u00000l\u0\00l\u u0000" }

{ "_1": "\u00002\u00001\u00009\u0000", "_2": "\u0000C\u0000a\u0000m\u0000e\u0000r\u0000o\u0000n\u0000\u0000P\u0000a\u00000l\u00000r\u u0000\r\u0000" }

{ "_1": "\u00002\u00002\u00003\u0000", "_2": "\u0000S\u0000a\u0000m\u0000\u0000V\u0000i\u0000t\u0000a\u0000n\u0000z\u00000a\u000\r\" }

{ "_1": "\u00002\u00002\u00005\u0000", "_2": "\u0000K\u0000a\u0000r\u0000e\u0000e\u0000n\u0000a\u0000\u0000D\u0000a\u00000v\u00000e\u u0000\r\u0000" }

{ "_1": "\u00002\u00002\u00006\u0000", "_2": "\u0000K\u0000a\u0000t\u0000e\u0000 \u0000F\u0000a\u0000r\u0000d\u0000e\u00000l\u0\00l\u u0000" }

{ "_1": "\u00002\u00002\u00008\u0000", "_2": "\u0000K\u0000a\u0000r\u0000e\u0000e\u0000n\u0000a\u0000\u0000D\u0000a\u00000v\u00000e\u u0000\r\u0000" }

{ "_1": "\u00002\u00002\u00009\u0000", "_2": "\u0000K\u0000a\u0000r\u0000e\u0000e\u0000n\u0000a\u0000\u0000D\u0000a\u00000v\u00000e\u\u u0000\r\u0000" }

{ "_1": "\u00002\u00003\u00000\u0000", "_2": "\u0000K\u0000a\u0000r\u0000e\u0000e\u0000n\u0000a\u0000\u0000D\u0000a\u00000v\u00000e\u u0000\r\u0000" }

这是实际的 CSV 数据

Retail Store,Store Retail Business Manager
105,Kate Fardell
106,Shona Marino
108,Shona Marino
111,Shona Marino
112,Lina Hannawe
113,Jennifer Hale
114,Stan Kakkasis
116,Stan Kakkasis
118,Stan Kakkasis
119,Shona Marino
127,Aydin Tebyanian
128,Cameron Palmer
197,Sharon Berger
201,Kareena Davies
202,Lina Hannawe
203,Sam Vitanza
206,Kareena Davies
208,Kareena Davies
214,Kareena Davies
217,Kylie Bradley
218,Kate Fardell
219,Cameron Palmer
223,Sam Vitanza
225,Kareena Davies
226,Kate Fardell
228,Kareena Davies
229,Kareena Davies
230,Kareena Davies
4

1 回答 1

1

我猜这是编码问题——输出中每个字符之间夹杂的 \u0000 表明源 CSV 文件是 UTF-16 编码,而 S3 Select 只支持 UTF-8 输入。

制作这一行:

using (var reader = new StreamReader(records.Payload))

像这样:

using (var reader = new StreamReader(records.Payload, System.Text.Encoding.UTF8))
于 2018-11-02T00:00:37.793 回答