Facebook API offline conversion: "param data must be an array" error
I am trying to upload events with some custom fields to a Facebook offline conversion dataset, but I get the following error:
Status: 400
Response:
{
  "error": {
    "message": "(#100) param data must be an array.",
    "type": "OAuthException",
    "code": 100,
    "fbtrace_id": "A5qsezd_MfvKEYYTVfPcu29"
  }
}
I am following the documentation page for uploading offline events:
https://developers.facebook.com/docs/marketing-api/offline-conversions/
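For reference, here is a minimal sketch of the payload shape I understand the SDK call expects from those docs: data is a list of event dicts, with match_keys as a nested object. The app id, token, dataset id and hash below are placeholders, not values from my setup.

from facebook_business.adobjects.offlineconversiondataset import OfflineConversionDataSet
from facebook_business.api import FacebookAdsApi

FacebookAdsApi.init(app_id="<APP_ID>", access_token="<ACCESS_TOKEN>")

params = {
    "upload_tag": "upload_test",
    "data": [                                                    # a list of event dicts
        {
            "match_keys": {"email": ["<sha256-hashed-email>"]},  # nested dict, not a JSON string
            "event_name": "Purchase",
            "event_time": 1592206967,                            # integer unix timestamp
            "value": 100.25,
            "currency": "CAD",
            "custom_data": {"dept": "RENTAL"},
        }
    ],
}
OfflineConversionDataSet("<DATASET_ID>").create_event(params=params)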
CSV structure:
email,event_name,event_time,value,dept,brand,tx_type,cust_type,cust_trend
79FBB38FC843911533020FD8DE5B29CBA9958F,Purchase,2020-06-15T07:42:47Z,100.25, RENTAL,NAN,PA,Active,Growth (+15% to LY)
8EF89542E99BF7D8C0D4AA9F218,Purchase,2020-06-15T17:46:13Z,50,DEPOSITS, NAN,Other,Active,Declined (-15% to LY)
4C83B542E9C9566AA8D6A5279839115E7C0C454A1,Purchase,2020-06-15T09:55:01Z,150,DEPOSITS, NAN,PA,Active,Declined (-15% to LY)
361604C2B8FC67,Purchase,2020-06-15T15:41:18Z,50,DEPOSITS, NAN,OtherNew (Less than 3 Months),Did Not Shop LY
09133B0CDFA527BA9013CA8F1A0382D76F9,Purchase,2020-06-15T08:44:47Z,1,DEPOSITS, NAN,PX,Active,Growth (+15% to LY)
50cff131E2B3042C6E533ss225146C37994E2C2,Purchase,2020-06-15T07:35:50Z,300,DEPOSITS, NAN,Other,ActiveGrowth (+10% to LY)
ECD35DBB79FF37B0FC95E131,Purchase,2020-06-15T16:13:28Z,50,DEPOSITS, NAN,PX,Active,Decline (-12% to LY)
Code:
def upload_offline_conversion(**args):
    from facebook_business.adobjects.offlineconversiondataset import OfflineConversionDataSet
    from facebook_business.api import FacebookAdsApi
    import pandas as pd
    # import gcsfs
    import json

    access_token = access_token  # app_id / dataset_id / access_token come from the surrounding job config
    FacebookAdsApi.init(app_id=app_id, access_token=access_token)
    offline_dataset = OfflineConversionDataSet(dataset_id)
    df = pd.read_csv('UPLOADS.csv', sep=',')
    # convert ISO timestamps to unix seconds
    df['event_time'] = (pd.to_datetime(df['event_time']).astype(int) / 10 ** 9).astype(int).astype(str)
    # build match_keys as a JSON string per row (only 'email' for now)
    df['match_keys'] = df.apply(lambda row: json.dumps({k: [row[k]] if k in ['email'] else row[k] for k in ['email'] if pd.notnull(row[k])}), axis=1)
    del df['email']  # deleting match_keys single columns since they are now useless
    df["currency"] = 'CAD'
    # collapse the custom fields of each event into a custom_data column
    data = (df.groupby(['event_name', 'event_time', 'match_keys', 'value', 'currency'], as_index=False)
              .apply(lambda x: x[['dept', 'brand', 'tx_type', 'cust_type', 'cust_trend']].to_dict('r'))
              .reset_index()
              .rename(columns={0: 'custom_data'})
              .to_json(orient='records'))
    print(data)
    batch_limit = 2000  # maximum number of events permitted in a single call
    for i in range(0, len(data), batch_limit):
        params = {
            'upload_tag': 'upload_test',
            'data': data[i:i+batch_limit],
        }
        # print(params)
        # offline_dataset.create_event(params=params)
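Note that `to_json(orient='records')` returns a single JSON string, so `len(data)` is a character count and `data[i:i+batch_limit]` slices characters rather than events. Below is a hedged sketch of the same grouping that keeps `data` as a Python list of dicts instead; it reuses the `df` built above and assumes one CSV row per grouped event.

import json

records = (df.groupby(['event_name', 'event_time', 'match_keys', 'value', 'currency'],
                      as_index=False)
             .apply(lambda x: x[['dept', 'brand', 'tx_type', 'cust_type', 'cust_trend']]
                    .to_dict('records'))
             .reset_index()
             .rename(columns={0: 'custom_data'})
             .to_dict(orient='records'))               # list of dicts, not a JSON string

for rec in records:
    rec['match_keys'] = json.loads(rec['match_keys'])  # back to a nested dict
    rec['event_time'] = int(rec['event_time'])         # integer unix timestamp
    rec['custom_data'] = rec['custom_data'][0]         # one custom_data dict per event

batch_limit = 2000
for i in range(0, len(records), batch_limit):
    params = {'upload_tag': 'upload_test', 'data': records[i:i + batch_limit]}
    # offline_dataset.create_event(params=params)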
Expected output:
data = [
    {
        match_keys: {"email": ["79FBB38FC843911533020FD8DE5B29CBA9958F"]},
        currency: "CAD",
        value: 100.25,
        event_name: "Purchase",
        event_time: 1592206967,
        custom_data: {
            dept: "RENTAL",
            brand: "NAN",
            tx_type: "PA",
            cust_type: "ACTIVE",
            cust_trend: "Growth (+15% to LY)"
        },
    },
    {
        match_keys: {"email": ["8EF89542E99BF7D8C0D4AA9F218"]},
        currency: "CAD",
        value: 50,
        event_name: "Purchase",
        event_time: 1592243173,
        custom_data: {
            dept: "RENTAL",
            brand: "NAN",
            tx_type: "PA",
            cust_type: "ACTIVE",
            cust_trend: "Growth (+15% to LY)"
        },
    },
    # and so on...
]
My sample output:
{'upload_tag': 'sales_upload_test_final',
 'data': '[
    {"event_name":"Purchase",
     "event_time":"1592243173",
     "match_keys":"{\"email\": [\"8EF89542E99BF7D8C0D4AA9F218\"]}",
     "value":"50",
     "currency":"CAD",
     "custom_data":[{"dept":"DEPOSITS","brand":" NAN","tx_type":"Other","cust_type":"Active","cust_trend":"Declined (-15% to LY)"}]}]'}
The LDU (Limited Data Use) flag also needs to be specified as of July 1, 2020.
Code:
def upload_offline_conversion(**args):
    from facebook_business.adobjects.offlineconversiondataset import OfflineConversionDataSet
    from facebook_business.api import FacebookAdsApi
    import pandas as pd
    # import gcsfs
    import json

    access_token = access_token  # app_id / dataset_id / access_token come from the surrounding job config
    FacebookAdsApi.init(app_id=app_id, access_token=access_token)
    offline_dataset = OfflineConversionDataSet(dataset_id)
    df = pd.read_csv('UPLOADS.csv', sep=',')
    df['event_time'] = (pd.to_datetime(df['event_time']).astype(int) / 10 ** 9).astype(int).astype(str)
    df['match_keys'] = df.apply(lambda row: json.dumps({k: [row[k]] if k in ['email'] else row[k] for k in ['email'] if pd.notnull(row[k])}), axis=1)
    del df['email']  # deleting match_keys single columns since they are now useless
    df["currency"] = 'CAD'
    data = (df.groupby(['event_name', 'event_time', 'match_keys', 'value', 'currency'], as_index=False)
              .apply(lambda x: x[['dept', 'brand', 'tx_type', 'cust_type', 'cust_trend']].to_dict('r'))
              .reset_index()
              .rename(columns={0: 'custom_data'})
              .to_dict(orient='records'))
    df = pd.DataFrame(data)
    df["data_processing_options"] = [[]] * df.shape[0]  # value is either [] or ["LDU"]
    data = df.to_dict(orient="records")
    batch_limit = 2000  # maximum number of events permitted in a single call
    for i in range(0, len(data), batch_limit):
        params = {
            'upload_tag': 'upload_test',
            'data': data[i:i+batch_limit],
        }
        # print(params)
        # offline_dataset.create_event(params=params)
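For the LDU part, my reading of the Limited Data Use docs is that each event carries a data_processing_options list, plus optional country/state fields. A hedged sketch of a single event with and without the flag; the geo codes shown are the documented US/California values and are only illustrative, and the hash is a placeholder.

event_without_ldu = {
    "match_keys": {"email": ["<sha256-hashed-email>"]},
    "event_name": "Purchase",
    "event_time": 1592206967,
    "value": 50,
    "currency": "CAD",
    "data_processing_options": [],             # explicitly not enabling LDU
}

event_with_ldu = {
    **event_without_ldu,
    "data_processing_options": ["LDU"],        # enable Limited Data Use
    "data_processing_options_country": 1,      # 1 = United States, 0 = let Facebook geolocate
    "data_processing_options_state": 1000,     # 1000 = California, 0 = let Facebook geolocate
}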