计算非 NA 的行数
Count number of rows that are not NA
所以我有一个如下所示的数据框:
"date","id_station","id_parameter","valor","unit","year","day","month","hour","zona"
2019-01-01 00:00:00,"AJM","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"ATI","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"BJU","CO",NA,15,2019,1,1,0,"CE"
2019-01-01 00:00:00,"CAM","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"CCA","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"CHO","CO",NA,15,2019,1,1,0,"SE"
2019-01-01 00:00:00,"CUA","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"FAC","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"HGM","CO",NA,15,2019,1,1,0,"CE"
2019-01-01 00:00:00,"IZT","CO",NA,15,2019,1,1,0,"CE"
2019-01-01 00:00:00,"LLA","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"LPR","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"MER","CO",NA,15,2019,1,1,0,"CE"
2019-01-01 00:00:00,"MGH","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"NEZ","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"PED","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"SAG","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"SFE","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"SJA","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"TAH","CO",NA,15,2019,1,1,0,"SE"
2019-01-01 00:00:00,"TLA","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"TLI","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"UAX","CO",NA,15,2019,1,1,0,"SE"
2019-01-01 00:00:00,"UIZ","CO",NA,15,2019,1,1,0,"SE"
2019-01-01 00:00:00,"VIF","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"XAL","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 01:00:00,"AJM","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"ATI","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"BJU","CO",NA,15,2019,1,1,1,"CE"
2019-01-01 01:00:00,"CAM","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"CCA","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"CHO","CO",NA,15,2019,1,1,1,"SE"
2019-01-01 01:00:00,"CUA","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"FAC","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"HGM","CO",NA,15,2019,1,1,1,"CE"
2019-01-01 01:00:00,"IZT","CO",NA,15,2019,1,1,1,"CE"
2019-01-01 01:00:00,"LLA","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"LPR","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"MER","CO",NA,15,2019,1,1,1,"CE"
2019-01-01 01:00:00,"MGH","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"NEZ","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"PED","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"SAG","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"SFE","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"SJA","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"TAH","CO",NA,15,2019,1,1,1,"SE"
2019-01-01 01:00:00,"TLA","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"TLI","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"UAX","CO",NA,15,2019,1,1,1,"SE"
2019-01-01 01:00:00,"UIZ","CO",NA,15,2019,1,1,1,"SE"
2019-01-01 01:00:00,"VIF","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"XAL","CO",NA,15,2019,1,1,1,"NE"
而我想要做的是根据 id_station、id_parameter、year、day 和 month 对所有内容进行分组。之后,我想计算每天 valor 列中非 NA 的行数。
最后,我想确定每个 id_station 有多少天至少有 18 个非 NA 值。如果这样的天数少于 274 天,我想删除与该 id_station
关联的所有值。
我该怎么做?
按感兴趣的列分组后,对一个逻辑向量求 sum 作为计数:
is.na(valor) 返回一个逻辑向量,
NA 处为 TRUE,非 NA 处为 FALSE;
用 ! 取反之后再对该逻辑向量求 sum,
每个 TRUE(相当于 1)
就代表一个非 NA 元素。
library(dplyr)
# Count non-NA `valor` rows per station/parameter/day.
# sum(!is.na(valor)) works because each non-NA value contributes TRUE -> 1.
df1 %>%
  group_by(id_station, id_parameter, year, day, month) %>%
  # .groups = "drop" returns an ungrouped result, avoiding the
  # leftover-grouping footgun in later operations on the summary.
  summarise(Count = sum(!is.na(valor)), .groups = "drop")
另一种可能的选择是
# Base-R equivalent: aggregate() sums the logical !is.na(valor) per group.
# Note: the data frame is `df1` (as in the dplyr answer), not `df`.
aggregate(
  cbind(Count = !is.na(valor)) ~ id_station + id_parameter + year + day + month,
  df1,
  sum
)
所以我有一个如下所示的数据框:
"date","id_station","id_parameter","valor","unit","year","day","month","hour","zona"
2019-01-01 00:00:00,"AJM","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"ATI","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"BJU","CO",NA,15,2019,1,1,0,"CE"
2019-01-01 00:00:00,"CAM","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"CCA","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"CHO","CO",NA,15,2019,1,1,0,"SE"
2019-01-01 00:00:00,"CUA","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"FAC","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"HGM","CO",NA,15,2019,1,1,0,"CE"
2019-01-01 00:00:00,"IZT","CO",NA,15,2019,1,1,0,"CE"
2019-01-01 00:00:00,"LLA","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"LPR","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"MER","CO",NA,15,2019,1,1,0,"CE"
2019-01-01 00:00:00,"MGH","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"NEZ","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"PED","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"SAG","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"SFE","CO",NA,15,2019,1,1,0,"SO"
2019-01-01 00:00:00,"SJA","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"TAH","CO",NA,15,2019,1,1,0,"SE"
2019-01-01 00:00:00,"TLA","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"TLI","CO",NA,15,2019,1,1,0,"NO"
2019-01-01 00:00:00,"UAX","CO",NA,15,2019,1,1,0,"SE"
2019-01-01 00:00:00,"UIZ","CO",NA,15,2019,1,1,0,"SE"
2019-01-01 00:00:00,"VIF","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 00:00:00,"XAL","CO",NA,15,2019,1,1,0,"NE"
2019-01-01 01:00:00,"AJM","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"ATI","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"BJU","CO",NA,15,2019,1,1,1,"CE"
2019-01-01 01:00:00,"CAM","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"CCA","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"CHO","CO",NA,15,2019,1,1,1,"SE"
2019-01-01 01:00:00,"CUA","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"FAC","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"HGM","CO",NA,15,2019,1,1,1,"CE"
2019-01-01 01:00:00,"IZT","CO",NA,15,2019,1,1,1,"CE"
2019-01-01 01:00:00,"LLA","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"LPR","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"MER","CO",NA,15,2019,1,1,1,"CE"
2019-01-01 01:00:00,"MGH","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"NEZ","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"PED","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"SAG","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"SFE","CO",NA,15,2019,1,1,1,"SO"
2019-01-01 01:00:00,"SJA","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"TAH","CO",NA,15,2019,1,1,1,"SE"
2019-01-01 01:00:00,"TLA","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"TLI","CO",NA,15,2019,1,1,1,"NO"
2019-01-01 01:00:00,"UAX","CO",NA,15,2019,1,1,1,"SE"
2019-01-01 01:00:00,"UIZ","CO",NA,15,2019,1,1,1,"SE"
2019-01-01 01:00:00,"VIF","CO",NA,15,2019,1,1,1,"NE"
2019-01-01 01:00:00,"XAL","CO",NA,15,2019,1,1,1,"NE"
而我想要做的是根据 id_station、id_parameter、year、day 和 month 对所有内容进行分组。之后,我想计算每天 valor 列中非 NA 的行数。
最后,我想确定每个 id_station 有多少天至少有 18 个非 NA 值。如果这样的天数少于 274 天,我想删除与该 id_station
关联的所有值。我该怎么做?
按感兴趣的列分组后,对一个逻辑向量求 sum 作为计数:
is.na(valor) 返回一个逻辑向量,
NA 处为 TRUE,非 NA 处为 FALSE;
用 ! 取反之后再对该逻辑向量求 sum,
每个 TRUE(相当于 1)
就代表一个非 NA 元素。
library(dplyr)
# Count non-NA `valor` rows per station/parameter/day.
# sum(!is.na(valor)) works because each non-NA value contributes TRUE -> 1.
df1 %>%
  group_by(id_station, id_parameter, year, day, month) %>%
  # .groups = "drop" returns an ungrouped result, avoiding the
  # leftover-grouping footgun in later operations on the summary.
  summarise(Count = sum(!is.na(valor)), .groups = "drop")
另一种可能的选择是
# Base-R equivalent: aggregate() sums the logical !is.na(valor) per group.
# Note: the data frame is `df1` (as in the dplyr answer), not `df`.
aggregate(
  cbind(Count = !is.na(valor)) ~ id_station + id_parameter + year + day + month,
  df1,
  sum
)