I need your help: I have to convert this JSON data into flat rows, but I don't want to iterate over the JSON with lots of nested for loops. Do you know a better way to do this?
I don't know how to do it in a "simple" way, and I'd like to avoid for: for: for: for.
Thank you!
EDIT
JSON input containing all the data:
[
  {
    "name": "Dummy_App_Name",
    "appKey": "Dummy_App_Key",
    "platform": "Dummy_Platform",
    "data": [
      [
        {
          "id": "ffb1e945-f619-48d9-ab7f-e7a2c1792003",
          "name": "Dummy_Ad_Network_Instance_1",
          "contents": [
            {
              "id": "Dummy_id",
              "name": "Dummy_Name",
              "isSkippable": true,
              "offerwallAbTest": null,
              "type": "Dummy_Type",
              "insights": {
                "reports": [
                  {
                    "country": "TD",
                    "clicks": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "conversions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "impressions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "dailyUniqueViewers": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "dailyUniqueConversions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "earnings": [ 0, 1, 2, 3, 4, 5, 6 ]
                  },
                  {
                    "country": "SC",
                    "clicks": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "conversions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "impressions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "dailyUniqueViewers": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "dailyUniqueConversions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "earnings": [ 0, 1, 2, 3, 4, 5, 6 ]
                  }
                ],
                "timestamps": [
                  "2023-03-06T00:00:00Z",
                  "2023-03-07T00:00:00Z",
                  "2023-03-08T00:00:00Z",
                  "2023-03-09T00:00:00Z",
                  "2023-03-10T00:00:00Z",
                  "2023-03-11T00:00:00Z",
                  "2023-03-12T00:00:00Z"
                ]
              }
            }
          ]
        },
        {
          "id": "be70f064-6226-412f-942c-2a2eeabb8d79",
          "name": "Dummy_Ad_Network_Instance_2",
          "contents": [
            {
              "id": "Dummy_Id",
              "name": "Dummy_Name",
              "isSkippable": true,
              "offerwallAbTest": null,
              "type": "Dummy_Type",
              "insights": {
                "reports": [
                  {
                    "country": "BY",
                    "clicks": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "conversions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "impressions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "dailyUniqueViewers": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "dailyUniqueConversions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "earnings": [ 0, 1, 2, 3, 4, 5, 6 ]
                  },
                  {
                    "country": "CA",
                    "clicks": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "conversions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "impressions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "dailyUniqueViewers": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "dailyUniqueConversions": [ 0, 1, 2, 3, 4, 5, 6 ],
                    "earnings": [ 0, 1, 2, 3, 4, 5, 6 ]
                  }
                ],
                "timestamps": [
                  "2023-03-06T00:00:00Z",
                  "2023-03-07T00:00:00Z",
                  "2023-03-08T00:00:00Z",
                  "2023-03-09T00:00:00Z",
                  "2023-03-10T00:00:00Z",
                  "2023-03-11T00:00:00Z",
                  "2023-03-12T00:00:00Z"
                ]
              }
            }
          ]
        }
      ]
    ]
  }
]
Expected output:
"date", "app_name", "appKey", "platform", "ad_network_instance", "placement", "country", "earnings", "impressions", "clicks", "conversions", "ecpm", "dailyUniqueViewers", "dailyUniqueConversions"
"2023-03-06T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","TD",0,0,0,0,((earning/1000000)/impressions)*1000,0,0,0
"2023-03-07T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","TD",1,1,1,1,((earning/1000000)/impressions)*1000,1,1,1
"2023-03-08T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","TD",2,2,2,2,((earning/1000000)/impressions)*1000,2,2,2
"2023-03-09T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","TD",3,3,3,3,((earning/1000000)/impressions)*1000,3,3,3
"2023-03-10T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","TD",4,4,4,4,((earning/1000000)/impressions)*1000,4,4,4
"2023-03-11T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","TD",5,5,5,5,((earning/1000000)/impressions)*1000,5,5,5
"2023-03-12T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","TD",6,6,6,6,((earning/1000000)/impressions)*1000,6,6,6
"2023-03-06T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","SC",0,0,0,0,((earning/1000000)/impressions)*1000,0,0,0
"2023-03-07T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","SC",1,1,1,1,((earning/1000000)/impressions)*1000,1,1,1
"2023-03-08T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","SC",2,2,2,2,((earning/1000000)/impressions)*1000,2,2,2
"2023-03-09T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","SC",3,3,3,3,((earning/1000000)/impressions)*1000,3,3,3
"2023-03-10T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","SC",4,4,4,4,((earning/1000000)/impressions)*1000,4,4,4
"2023-03-11T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","SC",5,5,5,5,((earning/1000000)/impressions)*1000,5,5,5
"2023-03-12T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_1","SC",6,6,6,6,((earning/1000000)/impressions)*1000,6,6,6
"2023-03-06T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","BY",0,0,0,0,((earning/1000000)/impressions)*1000,0,0,0
"2023-03-07T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","BY",1,1,1,1,((earning/1000000)/impressions)*1000,1,1,1
"2023-03-08T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","BY",2,2,2,2,((earning/1000000)/impressions)*1000,2,2,2
"2023-03-09T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","BY",3,3,3,3,((earning/1000000)/impressions)*1000,3,3,3
"2023-03-10T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","BY",4,4,4,4,((earning/1000000)/impressions)*1000,4,4,4
"2023-03-11T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","BY",5,5,5,5,((earning/1000000)/impressions)*1000,5,5,5
"2023-03-12T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","BY",6,6,6,6,((earning/1000000)/impressions)*1000,6,6,6
"2023-03-06T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","CA",0,0,0,0,((earning/1000000)/impressions)*1000,0,0,0
"2023-03-07T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","CA",1,1,1,1,((earning/1000000)/impressions)*1000,1,1,1
"2023-03-08T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","CA",2,2,2,2,((earning/1000000)/impressions)*1000,2,2,2
"2023-03-09T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","CA",3,3,3,3,((earning/1000000)/impressions)*1000,3,3,3
"2023-03-10T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","CA",4,4,4,4,((earning/1000000)/impressions)*1000,4,4,4
"2023-03-11T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","CA",5,5,5,5,((earning/1000000)/impressions)*1000,5,5,5
"2023-03-12T00:00:00Z","Dummy_App_Name","Dummy_App_Key","Dummy_Platform","Dummy_Ad_Network_Instance_2","CA",6,6,6,6,((earning/1000000)/impressions)*1000,6,6,6
2 Answers

ruarlubt 1#
Even though they look like rows of data, you can treat the timestamps and the other insights/reports values as *columns*:

1. Take each list and append it to a list of columns.
2. Use `zip(*cols)` to "rotate" or "transpose" that list of columns into a list of rows.

First, pre-define the headers:
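A minimal sketch of that step (variable names here and below are illustrative, not necessarily the answerer's original code); the column order mirrors the per-report fields in the JSON above:

```python
headers = [
    "timestamps", "clicks", "conversions", "impressions",
    "dailyUniqueViewers", "dailyUniqueConversions", "earnings",
]
```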
Create the columns list and initialize it with the timestamps list, then loop over the headers (skipping `timestamps`, i.e. `headers[1:]`) and append each report data list to the columns. Printed as-is, we see:
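Roughly like this, assuming the parsed JSON is loaded into `data` and we are handling one country report of one placement (the file name and indexing path are just examples):

```python
import json

with open("input.json") as fh:     # assumed input file name
    data = json.load(fh)

# one placement's insights and one of its country reports
insights = data[0]["data"][0][0]["contents"][0]["insights"]
report = insights["reports"][0]

cols = [insights["timestamps"]]    # start with the timestamps column
for header in headers[1:]:         # skip "timestamps"
    cols.append(report[header])    # each report field is already a full column

print(cols)
# [['2023-03-06T00:00:00Z', ..., '2023-03-12T00:00:00Z'],
#  [0, 1, 2, 3, 4, 5, 6],
#  [0, 1, 2, 3, 4, 5, 6],
#  ...]
```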
Print the headers, then zip the columns together (remember the star syntax, `*cols`, so that `zip` actually sees `(cols[0], cols[1], ...)`), and you will see the correct row structure.
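For example (again a sketch of that step):

```python
print(headers)
for row in zip(*cols):   # the * unpacks cols, so zip gets one iterable per column
    print(row)
# ('2023-03-06T00:00:00Z', 0, 0, 0, 0, 0, 0)
# ('2023-03-07T00:00:00Z', 1, 1, 1, 1, 1, 1)
# ...
```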
From there, bring in the `csv` module: use its `writerow(headers)` to write the single list of headers, and `writerows(zip(*cols))` to write every result (row) of the zip operation (the correct structure from above):
| timestamps | clicks | conversions | impressions | dailyUniqueViewers | dailyUniqueConversions | earnings |
| --- | --- | --- | --- | --- | --- | --- |
| 2023-03-06T00:00:00Z | 0 | 0 | 0 | 0 | 0 | 0 |
| 2023-03-07T00:00:00Z | 1 | 1 | 1 | 1 | 1 | 1 |
| 2023-03-08T00:00:00Z | 2 | 2 | 2 | 2 | 2 | 2 |
| 2023-03-09T00:00:00Z | 3 | 3 | 3 | 3 | 3 | 3 |
| 2023-03-10T00:00:00Z | 4 | 4 | 4 | 4 | 4 | 4 |
| 2023-03-11T00:00:00Z | 5 | 5 | 5 | 5 | 5 | 5 |
| 2023-03-12T00:00:00Z | 6 | 6 | 6 | 6 | 6 | 6 |
w8rqjzmb 2#

The code that solves this problem is:
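One possible shape for that code (a sketch, not necessarily the answerer's original; the file names, the `flatten` helper, and the guard against zero impressions in the `ecpm` calculation are assumptions). The nested structure still has to be walked, but the nesting can be hidden inside a single generator so the CSV-writing code stays flat:

```python
import csv
import json

HEADERS = [
    "date", "app_name", "appKey", "platform", "ad_network_instance",
    "placement", "country", "earnings", "impressions", "clicks",
    "conversions", "ecpm", "dailyUniqueViewers", "dailyUniqueConversions",
]

def flatten(apps):
    """Yield one flat row per (app, instance, placement, country, day)."""
    for app in apps:
        # app["data"] is a list of lists of ad network instances
        for instance in (inst for group in app["data"] for inst in group):
            for placement in instance["contents"]:
                insights = placement["insights"]
                for report in insights["reports"]:
                    for i, date in enumerate(insights["timestamps"]):
                        earnings = report["earnings"][i]
                        impressions = report["impressions"][i]
                        # eCPM formula from the expected output; guarded for impressions == 0
                        ecpm = (earnings / 1_000_000) / impressions * 1000 if impressions else 0
                        yield [
                            date, app["name"], app["appKey"], app["platform"],
                            instance["name"], placement["name"], report["country"],
                            earnings, impressions, report["clicks"][i],
                            report["conversions"][i], ecpm,
                            report["dailyUniqueViewers"][i],
                            report["dailyUniqueConversions"][i],
                        ]

with open("input.json") as fh:                     # assumed input file name
    apps = json.load(fh)

with open("output.csv", "w", newline="") as fh:    # assumed output file name
    writer = csv.writer(fh, quoting=csv.QUOTE_NONNUMERIC)
    writer.writerow(HEADERS)
    writer.writerows(flatten(apps))
```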