直接上代碼
import com.alibaba.fastjson.JSON;
import com.tc.flink.analysis.label.bean.output.ItemIdWithAction;
import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.common.state.*;
import org.apache.flink.api.common.time.Time;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.configuration.Configuration;
import java.util.HashMap;
import java.util.Map;
/**
 * Keyed map function that tracks per-(itemId, action) counts in TTL-backed
 * {@link MapState} and, whenever a count changes, emits the full snapshot of
 * the state as a JSON string keyed by "{prefix}@{startCityId}@{endCityId}".
 *
 * <p>Returns {@code Tuple2.of(null, null)} when the incoming count equals the
 * stored one, so downstream can filter out no-op updates.
 */
public class CityItemIdClickedState extends RichMapFunction<Tuple2<ItemIdWithAction, Integer>, Tuple2<String, String>> {
    // Per-key state: (itemId, action) -> latest count. Entries expire via TTL.
    private transient MapState<ItemIdWithAction, Integer> map;
    public transient static final String CLICK_PREFIX_KEY = "cityClicked";
    public transient static final String CREATE_PREFIX_KEY = "cityCreated";

    @Override
    public void open(Configuration parameters) throws Exception {
        // Entries live 2 hours, refreshed on create/write; expired entries are
        // never returned and are lazily purged when the state is read.
        StateTtlConfig ttlConfig = StateTtlConfig.newBuilder(Time.minutes(60 * 2))
                .setUpdateType(StateTtlConfig.UpdateType.OnCreateAndWrite)
                .setStateVisibility(StateTtlConfig.StateVisibility.NeverReturnExpired)
                .build();
        // NOTE: descriptor name "paln_click_num" (sic) is kept as-is; renaming it
        // would break restore from existing savepoints/checkpoints.
        MapStateDescriptor<ItemIdWithAction, Integer> descriptor =
                new MapStateDescriptor<ItemIdWithAction, Integer>("paln_click_num", ItemIdWithAction.class, Integer.class);
        descriptor.enableTimeToLive(ttlConfig);
        map = getRuntimeContext().getMapState(descriptor);
        super.open(parameters);
    }

    @Override
    public Tuple2<String, String> map(Tuple2<ItemIdWithAction, Integer> keyValue) throws Exception {
        Integer num = keyValue.f1;
        ItemIdWithAction itemIdWithAction = keyValue.f0;
        // Unchanged count -> emit a sentinel pair so downstream can drop it.
        if (num.equals(map.get(itemIdWithAction))) {
            return Tuple2.of(null, null);
        }
        map.put(itemIdWithAction, num);
        String prefixKey = itemIdWithAction.getAction().equals("click") ? CLICK_PREFIX_KEY : CREATE_PREFIX_KEY;
        String key = String.format("%s@%s@%s", prefixKey, itemIdWithAction.getStartCityId(), itemIdWithAction.getEndCityId());
        Map<String, Integer> valueMap = new HashMap<String, Integer>();
        // FIX: iterate entries() and read the value from each entry instead of
        // looping over keys() and calling map.get(k) per key. With TTL enabled
        // (NeverReturnExpired), each get() may lazily evict an expired entry
        // from the backing HashMap while the key iterator is still live, which
        // throws ConcurrentModificationException under load (see TtlMapState).
        for (Map.Entry<ItemIdWithAction, Integer> entry : map.entries()) {
            valueMap.put(entry.getKey().getItemId(), entry.getValue());
        }
        return Tuple2.of(key, JSON.toJSONString(valueMap));
    }
}
map是一條條處理,每次取所有MapState數據輸出。
本地跑,集羣跑都沒問題,但當流量大MapState過大時候,就報如下錯誤,每天報錯一兩次,重啓。
java.util.ConcurrentModificationException
at java.util.HashMap$HashIterator.nextNode(HashMap.java:1442)
at java.util.HashMap$EntryIterator.next(HashMap.java:1476)
at java.util.HashMap$EntryIterator.next(HashMap.java:1474)
at org.apache.flink.runtime.state.ttl.TtlMapState$EntriesIterator.hasNext(TtlMapState.java:161)
at com.tc.flink.operator.state.CityItemIdClickedState.map(CityItemIdClickedState.java:42)
at com.tc.flink.operator.state.CityItemIdClickedState.map(CityItemIdClickedState.java:14)
at org.apache.flink.streaming.api.operators.StreamMap.processElement(StreamMap.java:41)
at org.apache.flink.streaming.runtime.io.StreamInputProcessor.processInput(StreamInputProcessor.java:202)
at org.apache.flink.streaming.runtime.tasks.OneInputStreamTask.run(OneInputStreamTask.java:105)
at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:302)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:711)
at java.lang.Thread.run(Thread.java:748)
報錯原因在於 for (ItemIdWithAction tmp : map.keys())
mapstate被併發修改了。
比較奇怪的是map-function單線程處理,爲什麼出現ConcurrentModificationException
查看TtlMapState源碼
originalIterator::remove
是剔除動作。有點類似redis,當再次訪問時候,纔會觸發剔除(有可能產生內存泄漏)。
但是map-function是key-by下單線程操作,爲什麼會出現併發問題。
再看TtlStateFactory類
確實異步刪除,所以mapstate過大的時候,就會出現這種問題。
修改代碼
Iterator<Map.Entry<ItemIdWithAction,Integer>> mapIterator= map.iterator();
while(mapIterator.hasNext()){
Map.Entry<ItemIdWithAction,Integer> entry= mapIterator.next();
valueMap.put(entry.getKey().getItemId(),entry.getValue());
}
犯了低級錯誤,不過也細讀了源碼