0

我的 JSON 金融时间序列数据来自 alphavantage

我怀疑:如果路径标签中没有空格和句点(.),我就不会遇到问题。我设法让 T-SQL 在解析器不报错的情况下运行,但路径节点没有返回任何数据。特殊字符是原因吗?

 DECLARE @JSON NVARCHAR(MAX)

SELECT @JSON = 
'{
    "Meta Data": {
        "1. Information": "Daily Prices (open, high, low, close) and Volumes",
        "2. Symbol": "XLK",
        "3. Last Refreshed": "2020-06-30",
        "4. Output Size": "Full size",
        "5. Time Zone": "US/Eastern"
    },
    "Time Series (Daily)": {
        "2020-06-30": {
            "1. open": "102.6600",
            "2. high": "104.9100",
            "3. low": "102.5200",
            "4. close": "104.4900",
            "5. volume": "8061852"
        },
        "2020-06-29": {
            "1. open": "101.5300",
            "2. high": "102.6600",
            "3. low": "100.3100",
            "4. close": "102.6500",
            "5. volume": "9271548"
        },
        "2020-06-26": {
            "1. open": "103.3800",
            "2. high": "103.5300",
            "3. low": "101.2300",
            "4. close": "101.5400",
            "5. volume": "22135104"
        },
        "2020-06-25": {
            "1. open": "102.4200",
            "2. high": "103.6900",
            "3. low": "101.3100",
            "4. close": "103.5800",
            "5. volume": "10265454"
        },
        "2020-06-24": {
            "1. open": "104.2000",
            "2. high": "104.7800",
            "3. low": "101.7100",
            "4. close": "102.2700",
            "5. volume": "12710857"
        },
        "2020-06-23": {
            "1. open": "104.8100",
            "2. high": "105.8300",
            "3. low": "104.4800",
            "4. close": "104.6300",
            "5. volume": "9508806"
        },
        "2020-06-22": {
            "1. open": "102.0900",
            "2. high": "103.9700",
            "3. low": "101.9300",
            "4. close": "103.8800",
            "5. volume": "7365608"
        },
        "2020-06-19": {
            "1. open": "104.1900",
            "2. high": "104.3100",
            "3. low": "101.7500",
            "4. close": "102.2400",
            "5. volume": "15638477"
        },
        "2020-06-18": {
            "1. open": "102.3000",
            "2. high": "103.0600",
            "3. low": "102.0800",
            "4. close": "102.9500",
            "5. volume": "6853710"
        },
        "2020-06-17": {
            "1. open": "103.0900",
            "2. high": "103.4600",
            "3. low": "102.3200",
            "4. close": "102.4900",
            "5. volume": "8655904"
        },
        "2020-06-16": {
            "1. open": "102.7700",
            "2. high": "103.3900",
            "3. low": "100.9200",
            "4. close": "102.5100",
            "5. volume": "13967902"
        },
        "2020-06-15": {
            "1. open": "97.6000",
            "2. high": "100.8200",
            "3. low": "97.2800",
            "4. close": "100.3700",
            "5. volume": "14961914"
        },
        "2020-06-12": {
            "1. open": "100.7700",
            "2. high": "101.2700",
            "3. low": "97.6800",
            "4. close": "99.4100",
            "5. volume": "21642002"
        },
        "2020-06-11": {
            "1. open": "102.3000",
            "2. high": "102.4500",
            "3. low": "98.1100",
            "4. close": "98.1400",
            "5. volume": "19867498"
        },
        "2020-06-10": {
            "1. open": "103.1400",
            "2. high": "104.8900",
            "3. low": "102.9500",
            "4. close": "104.1100",
            "5. volume": "12177473"
        },
        "2020-06-09": {
            "1. open": "101.4100",
            "2. high": "102.9000",
            "3. low": "101.3200",
            "4. close": "102.4000",
            "5. volume": "7352624"
        },
        "2020-06-08": {
            "1. open": "101.2300",
            "2. high": "101.9600",
            "3. low": "100.3000",
            "4. close": "101.9200",
            "5. volume": "8064775"
        },
        "2020-06-05": {
            "1. open": "99.5100",
            "2. high": "101.7000",
            "3. low": "99.3400",
            "4. close": "101.4100",
            "5. volume": "10374918"
        }
    }
}
'



    SELECT 
           *
    FROM OPENJSON(@JSON,'$."Time Series (Daily)"') 
    WITH(   

            series_open VARCHAR(18) '$."1. open"',
            searies_high VARCHAR(128) '$."2. high"',
            series_close VARCHAR(128) '$."4. close"',
            searies_volumn VARCHAR(28) '$."5. volume"'

          )

锦上添花的需求:让元数据在整个结果集的每一行中重复出现;让日期在其路径下 5 个子节点对应的每一行中重复出现。

谢谢专家!

4

1 回答 1

3

希望以下内容足以让您自己解决这个问题:

你的 JSON(减少到只有一些节点):

-- Reduced sample of the Alpha Vantage payload (only a few "Time Series"
-- nodes kept) used as input for the demonstration query below.
DECLARE @JSON NVARCHAR(MAX)=
N'{
    "Meta Data": {
        "1. Information": "Daily Prices (open, high, low, close) and Volumes",
        "2. Symbol": "XLK",
        "3. Last Refreshed": "2020-06-30",
        "4. Output Size": "Full size",
        "5. Time Zone": "US/Eastern"
    },
    "Time Series (Daily)": {
        "2020-06-30": {
            "1. open": "102.6600",
            "2. high": "104.9100",
            "3. low": "102.5200",
            "4. close": "104.4900",
            "5. volume": "8061852"
        },
        "2020-06-29": {
            "1. open": "101.5300",
            "2. high": "102.6600",
            "3. low": "100.3100",
            "4. close": "102.6500",
            "5. volume": "9271548"
        },
        "2020-06-26": {
            "1. open": "103.3800",
            "2. high": "103.5300",
            "3. low": "101.2300",
            "4. close": "101.5400",
            "5. volume": "22135104"
        }
    }
}'

--查询

-- First OPENJSON splits the document into its two top-level branches;
-- AS JSON keeps each branch as a JSON fragment instead of plain text,
-- so it can be fed into JSON_VALUE() and further OPENJSON() calls.
SELECT
       -- metadata, repeated on every output row
       JSON_VALUE(root.[Meta Data], '$."1. Information"')    AS Meta_Information,
       JSON_VALUE(root.[Meta Data], '$."2. Symbol"')         AS Meta_Symbol,
       JSON_VALUE(root.[Meta Data], '$."3. Last Refreshed"') AS Meta_LastRefreshed,
       JSON_VALUE(root.[Meta Data], '$."4. Output Size"')    AS Meta_OutputSize,
       JSON_VALUE(root.[Meta Data], '$."5. Time Zone"')      AS Meta_TimeZone,
       -- without a WITH clause OPENJSON exposes [key]/[value]; the key is the date
       daily.[key] AS Data_Date,
       ohlc.*
FROM OPENJSON(@JSON)
     WITH ([Meta Data]           NVARCHAR(MAX) AS JSON
          ,[Time Series (Daily)] NVARCHAR(MAX) AS JSON) AS root
-- one row per dated entry inside "Time Series (Daily)"
CROSS APPLY OPENJSON(root.[Time Series (Daily)]) AS daily
-- dive into each dated object and type its properties
CROSS APPLY OPENJSON(daily.[value])
     WITH ([1. open] DECIMAL(10,4)
          ,[2. high] DECIMAL(10,4)
          --and so on
          ) AS ohlc;

简而言之:

  • 我们将 OPENJSON() 与 WITH 子句一起使用,来获得第一级节点(Meta Data 和 Time Series)。
  • 我们使用 AS JSON 来告诉引擎:该结果仍然是 JSON,而不是普通文本。
  • 现在我们可以使用 JSON_VALUE() 直接读取元数据(这也可以直接作用于 @JSON)。
  • 我们可以再次使用 OPENJSON() 来深入下一层。
  • 把描述性文字用作键名是一个坏习惯。实际上,这些以日期为键的节点最好使用同一个键名,并把日期作为对象内部的一个属性来携带……不过,我们可以在不带 WITH 子句的情况下使用 OPENJSON,来检索 key(= 日期)和 value(= 对象)两列。
  • 另一个 OPENJSON() 以 value 作为输入,并返回其内部的值。
于 2020-07-01T07:46:05.877 回答