Is there a way to schedule jobs to specific processors in Apache Flink?

I am new to Apache Flink and I currently plan to test scheduling algorithms on a heterogeneous processing system, so it matters a great deal to me which processor each job is deployed on. However, I cannot find how to specify the ID of the processor a job should be deployed to, nor a way to retrieve the processors that are available. If you could give me some hints on how to do this, I would sincerely appreciate your help. Hope you have a nice day :)

I worked through a similar problem: scheduling and monitoring Flink subtasks on specific CPU cores of a machine. I used LinuxJNAAffinity (https://github.com/OpenHFT/Java-Thread-Affinity) to solve it. Perhaps you can base your solution on mine. Here is one of my UDFs.

import java.util.BitSet;
import java.util.List;

import org.apache.flink.api.common.functions.RichMapFunction;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.configuration.Configuration;
import org.sense.flink.pojo.Point;
import org.sense.flink.pojo.ValenciaItem;
import org.sense.flink.util.CRSCoordinateTransformer;
import org.sense.flink.util.CpuGauge;
import org.sense.flink.util.SimpleGeographicalPolygons;

import net.openhft.affinity.impl.LinuxJNAAffinity;

public class ValenciaItemDistrictMap extends RichMapFunction<ValenciaItem, ValenciaItem> {
    private static final long serialVersionUID = 624354384779615610L;
    private SimpleGeographicalPolygons sgp;
    private transient CpuGauge cpuGauge;
    private BitSet affinity;
    private boolean pinningPolicy;

    public ValenciaItemDistrictMap() {
        this(false);
    }

    public ValenciaItemDistrictMap(boolean pinningPolicy) {
        this.pinningPolicy = pinningPolicy;
    }

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        this.sgp = new SimpleGeographicalPolygons();
        this.cpuGauge = new CpuGauge();
        getRuntimeContext().getMetricGroup().gauge("cpu", cpuGauge);

        if (this.pinningPolicy) {
            // listing the cpu cores available
            int nbits = Runtime.getRuntime().availableProcessors();
            // pinning the operator's thread to a specific cpu core
            this.affinity = new BitSet(nbits);
            affinity.set(((int) Thread.currentThread().getId() % nbits));
            LinuxJNAAffinity.INSTANCE.setAffinity(affinity);
        }
    }

    @Override
    public ValenciaItem map(ValenciaItem value) throws Exception {
        // updates the CPU core currently in use
        this.cpuGauge.updateValue(LinuxJNAAffinity.INSTANCE.getCpu());
        System.err.println(ValenciaItemDistrictMap.class.getSimpleName() + " thread[" + Thread.currentThread().getId()
                + "] core[" + this.cpuGauge.getValue() + "]");

        List<Point> coordinates = value.getCoordinates();
        boolean flag = true;
        int i = 0;
        // iterate over the coordinates until a district is found or the list is exhausted
        while (flag && i < coordinates.size()) {
            Tuple3<Long, Long, String> adminLevel = sgp.getAdminLevel(coordinates.get(i));
            if (adminLevel.f0 != null && adminLevel.f1 != null) {
                value.setId(adminLevel.f0);
                value.setAdminLevel(adminLevel.f1);
                value.setDistrict(adminLevel.f2);
                flag = false;
            } else {
                i++;
            }
        }
        if (flag) {
            // if no district was found for the given coordinates, fall back to district 16 (Benicalap)
            value.clearCoordinates();
            value.addCoordinates(
                    new Point(724328.279007, 4374887.874634, CRSCoordinateTransformer.DEFAULT_CRS_EPSG_25830));
            value.setId(16L);
            value.setAdminLevel(9L);
            value.setDistrict("Benicalap");
        }
        return value;
    }
}
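
The CpuGauge used above comes from the answerer's own repository (org.sense.flink.util.CpuGauge) and is not shown in the answer. A minimal sketch, assuming it is nothing more than a Flink Gauge<Integer> backed by a mutable field that updateValue() writes to, could look like this:

import org.apache.flink.metrics.Gauge;

public class CpuGauge implements Gauge<Integer> {
    // last CPU core id reported by the operator; -1 means "not measured yet"
    private int value = -1;

    public void updateValue(int value) {
        this.value = value;
    }

    @Override
    public Integer getValue() {
        return value;
    }
}

To actually run the UDF with the pinning policy enabled, you wire it into a streaming job as usual. The sketch below is not from the original answer, and ValenciaItemSource is a hypothetical placeholder for whatever source produces ValenciaItem records in your pipeline:

import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class ValenciaDistrictJob {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // ValenciaItemSource is a hypothetical SourceFunction<ValenciaItem>;
        // replace it with whatever produces ValenciaItem records in your job
        DataStream<ValenciaItem> items = env.addSource(new ValenciaItemSource());

        // passing true enables the pinning policy: each parallel instance of the
        // operator pins its thread to a CPU core in open(), as shown above
        items.map(new ValenciaItemDistrictMap(true))
             .print();

        env.execute("ValenciaItemDistrictMap with CPU affinity");
    }
}

Note that Flink's own scheduler only assigns tasks to task slots, not to CPU cores, which is why the pinning happens inside the operator's open() method rather than through any Flink configuration.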