EGTF.java

  1. package org.opentrafficsim.core.egtf;

  2. import java.util.LinkedHashMap;
  3. import java.util.LinkedHashSet;
  4. import java.util.Map;
  5. import java.util.NavigableMap;
  6. import java.util.Objects;
  7. import java.util.Set;
  8. import java.util.SortedMap;
  9. import java.util.TreeMap;
  10. import java.util.stream.IntStream;

  11. /**
  12.  * Extended Generalized Treiber-Helbing Filter (van Lint and Hoogendoorn, 2009). This is an extension of the Adaptive Smoothing
  13.  * Method (Treiber and Helbing, 2002). A fast filter for equidistant grids (Schreiter et al., 2010) is available. This fast
  14.  * implementation also supports multiple data sources.
  15.  * <p>
  16.  * To allow flexible usage the EGTF works with {@code DataSource}, {@code DataStream} and {@code Quantity}.
  17.  * <p>
  18.  * A {@code DataSource}, such as "loop detectors", "floating-car data" or "camera", is mostly an identifier, but can be
  19.  * requested to provide several data streams.
  20.  * <p>
  21.  * A {@code DataStream} is one {@code DataSource} supplying one {@code Quantity}. For instance "loop detectors" supplying
  22.  * "flow". In a {@code DataStream}, supplied by the {@code DataSource}, the standard deviations of measurements in congestion
  23.  * and in free flow are defined. These determine the reliability of the {@code Quantity} data from the given
  24.  * {@code DataSource}, and thus ultimately the weight of the data in the estimation of the quantity.
  25.  * <p>
  26.  * A {@code Quantity}, such as "flow" or "density", defines what is measured and what is requested as output. The output can be
  27.  * in typed format using a {@code Converter}. Default quantities are available under {@code SPEED_SI}, {@code FLOW_SI} and
  28.  * {@code DENSITY_SI}, all under {@code Quantity}.
  29.  * <p>
  30.  * Data can be added using several methods for point data, vector data (multiple independent location-time combinations) and
  31.  * grid data (data in a grid pattern). Data is added for a particular {@code DataStream}.
  32.  * <p>
  33.  * For simple use-cases where a single data source is used, data can be added directly with a {@code Quantity}, in which case
  34.  * a default {@code DataSource} and a default {@code DataStream} for each {@code Quantity} are used internally. All data
  35.  * should either be added using a {@code Quantity}, or all of it should be added using a {@code DataSource}. Otherwise the
  36.  * relative data reliability is undefined.
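 * <p>
 * A minimal hedged sketch of this quantity-based shortcut (location, time and values are illustrative assumptions):
 * <pre>{@code
 * EGTF egtf = new EGTF();
 * egtf.addPointDataSI(Quantity.SPEED_SI, 1200.0, 60.0, 25.0); // location [m], time [s], speed [m/s]
 * egtf.addPointDataSI(Quantity.FLOW_SI, 1200.0, 60.0, 0.55); // flow [1/s]
 * }</pre>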
  37.  * <p>
  38.  * Output can be requested from the EGTF using a {@code Kernel}, a spatiotemporal pattern determining measurement weights. The
  39.  * {@code Kernel} defines an optional maximum spatial and temporal range for measurements to consider, and uses a {@code Shape}
  40.  * to determine the weight for a given distance and time from the estimated point. By default this shape is an exponential
  41.  * function. A Gaussian kernel is also available, and any other shape could also be implemented.
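 * <p>
 * A hedged usage sketch of the data-source based workflow (the stream reliability values and the output grid are illustrative
 * assumptions, not prescribed values):
 * <pre>{@code
 * EGTF egtf = new EGTF(); // default cCong, cFree, deltaV, vc and kernel
 * DataSource loops = egtf.getDataSource("loop detectors");
 * DataStream<?> speeds = loops.addStreamSI(Quantity.SPEED_SI, 1.4, 4.0); // reliability parameters (illustrative)
 * DataStream<?> flows = loops.addStreamSI(Quantity.FLOW_SI, 0.01, 0.02); // reliability parameters (illustrative)
 * egtf.addPointDataSI(speeds, 1200.0, 60.0, 25.0); // location [m], time [s], speed [m/s]
 * egtf.addPointDataSI(flows, 1200.0, 60.0, 0.55); // flow [1/s]
 * egtf.setKernelSI(300.0, 30.0, 1000.0, 120.0); // sigma [m], tau [s], xMax [m], tMax [s]
 * double[] locationGrid = {0.0, 100.0, 200.0}; // output grid [m]
 * double[] timeGrid = {0.0, 60.0, 120.0}; // output grid [s]
 * Filter filtered = egtf.filterSI(locationGrid, timeGrid, Quantity.SPEED_SI, Quantity.FLOW_SI);
 * }</pre>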
  42.  * <p>
  43.  * Parameters from the EGTF are found in the following places:
  44.  * <ul>
  45.  * <li>{@code EGTF}: <i>cCong</i>, <i>cFree</i>, <i>deltaV</i> and <i>vc</i>, defining the overall traffic flow properties.</li>
  46.  * <li>{@code Kernel}: <i>tMax</i> and <i>xMax</i>, defining the maximum range to consider.</li>
  47.  * <li>{@code KernelShape}: <i>sigma</i> and <i>tau</i>, determining the decay of weights for measurements farther away in
  48.  * space and time.</li>
  49.  * <li>{@code DataStream}: <i>thetaCong</i> and <i>thetaFree</i>, defining the reliability by the standard deviation of measured
  50.  * data in free flow and congestion from a particular data stream.</li>
  51.  * </ul>
  52.  * References:
  53.  * <ul>
  54.  * <li>van Lint, J. W. C. and Hoogendoorn, S. P. (2009). A robust and efficient method for fusing heterogeneous data from
  55.  * traffic sensors on freeways. Computer Aided Civil and Infrastructure Engineering, accepted for publication.</li>
  56.  * <li>Schreiter, T., van Lint, J. W. C., Treiber, M. and Hoogendoorn, S. P. (2010). Two fast implementations of the Adaptive
  57.  * Smoothing Method used in highway traffic state estimation. 13th International IEEE Conference on Intelligent Transportation
  58.  * Systems, 19-22 Sept. 2010, Funchal, Portugal.</li>
  59.  * <li>Treiber, M. and Helbing, D. (2002). Reconstructing the spatio-temporal traffic dynamics from stationary detector data.
  60.  * Cooper@tive Tr@nsport@tion Dyn@mics, 1:3.1–3.24.</li>
  61.  * </ul>
  62.  * <p>
  63.  * Copyright (c) 2013-2020 Delft University of Technology, PO Box 5, 2600 AA, Delft, the Netherlands. All rights reserved. <br>
  64.  * BSD-style license. See <a href="http://opentrafficsim.org/node/13">OpenTrafficSim License</a>.
  65.  * <p>
  66.  * @version $Revision$, $LastChangedDate$, by $Author$, initial version 3 Oct. 2018 <br>
  67.  * @author <a href="http://www.transport.citg.tudelft.nl">Wouter Schakel</a>
  68.  */
  69. public class EGTF
  70. {

  71.     /** Default sigma value. */
  72.     private static final double DEFAULT_SIGMA = 300.0;

  73.     /** Default tau value. */
  74.     private static final double DEFAULT_TAU = 30.0;

  75.     /** Filter kernel. */
  76.     private Kernel kernel;

  77.     /** Shock wave speed in congestion. */
  78.     private final double cCong;

  79.     /** Shock wave speed in free flow. */
  80.     private final double cFree;

  81.     /** Speed range between congestion and free flow. */
  82.     private final double deltaV;

  83.     /** Flip-over speed below which we have congestion. */
  84.     private final double vc;

  85.     /** Data sources by label so we can return the same instances upon repeated request. */
  86.     private final Map<String, DataSource> dataSources = new LinkedHashMap<>();

  87.     /** Default data source for cases where a single data source is used. */
  88.     private DataSource defaultDataSource = null;

  89.     /** Default data streams for cases where a single data source is used. */
  90.     private Map<Quantity<?, ?>, DataStream<?>> defaultDataStreams = null;

  91.     /** True if data is currently being added using a quantity, in which case a check should not occur. */
  92.     private boolean addingByQuantity;

  93.     /** All point data sorted by space and time, and per data stream. */
  94.     private NavigableMap<Double, NavigableMap<Double, Map<DataStream<?>, Double>>> data = new TreeMap<>();

  95.     /** Whether the calculation was interrupted. */
  96.     private boolean interrupted = false;

  97.     /** Listeners. */
  98.     private Set<EgtfListener> listeners = new LinkedHashSet<>();

  99.     /**
  100.      * Constructor using cCong = -18 km/h, cFree = 80 km/h, deltaV = 10 km/h and vc = 80 km/h. A default kernel is set.
  101.      */
  102.     public EGTF()
  103.     {
  104.         this(-18.0, 80.0, 10.0, 80.0);
  105.     }

  106.     /**
  107.      * Constructor defining global settings. A default kernel is set.
  108.      * @param cCong double; shock wave speed in congestion [km/h]
  109.      * @param cFree double; shock wave speed in free flow [km/h]
  110.      * @param deltaV double; speed range between congestion and free flow [km/h]
  111.      * @param vc double; flip-over speed below which we have congestion [km/h]
  112.      */
  113.     public EGTF(final double cCong, final double cFree, final double deltaV, final double vc)
  114.     {
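        // inputs are given in km/h (see the constructor Javadoc); store them in SI units (m/s) by dividing by 3.6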
  115.         this.cCong = cCong / 3.6;
  116.         this.cFree = cFree / 3.6;
  117.         this.deltaV = deltaV / 3.6;
  118.         this.vc = vc / 3.6;
  119.         setKernel();
  120.     }

  121.     /**
  122.      * Convenience constructor that also sets a specified kernel.
  123.      * @param cCong double; shock wave speed in congestion [km/h]
  124.      * @param cFree double; shock wave speed in free flow [km/h]
  125.      * @param deltaV double; speed range between congestion and free flow [km/h]
  126.      * @param vc double; flip-over speed below which we have congestion [km/h]
  127.      * @param sigma double; spatial kernel size in [m]
  128.      * @param tau double; temporal kernel size in [s]
  129.      * @param xMax double; maximum spatial range in [m]
  130.      * @param tMax double; maximum temporal range in [s]
  131.      */
  132.     @SuppressWarnings("parameternumber")
  133.     public EGTF(final double cCong, final double cFree, final double deltaV, final double vc, final double sigma,
  134.             final double tau, final double xMax, final double tMax)
  135.     {
  136.         this(cCong, cFree, deltaV, vc);
  137.         setKernelSI(sigma, tau, xMax, tMax);
  138.     }

  139.     // ********************
  140.     // *** DATA METHODS ***
  141.     // ********************

  142.     /**
  143.      * Returns a data source, which is created if necessary.
  144.      * @param name String; unique name for the data source
  145.      * @return DataSource; data source
  146.      * @throws IllegalStateException when data has been added without a data source
  147.      */
  148.     public DataSource getDataSource(final String name)
  149.     {
  150.         if (this.defaultDataSource != null)
  151.         {
  152.             throw new IllegalStateException(
  153.                     "Obtaining a (new) data source after data has been added without a data source is not allowed.");
  154.         }
  155.         return this.dataSources.computeIfAbsent(name, (key) -> new DataSource(key));
  156.     }

  157.     /**
  158.      * Removes all data from before the given time. This is useful when this class is used live, where older data is no
  159.      * longer required.
  160.      * @param time double; time before which all data can be removed
  161.      */
  162.     public synchronized void clearDataBefore(final double time)
  163.     {
  164.         for (SortedMap<Double, Map<DataStream<?>, Double>> map : this.data.values())
  165.         {
  166.             map.subMap(Double.NEGATIVE_INFINITY, time).clear();
  167.         }
  168.     }

  169.     /**
  170.      * Adds point data.
  171.      * @param quantity Quantity&lt;?, ?&gt;; quantity of the data
  172.      * @param location double; location in [m]
  173.      * @param time double; time in [s]
  174.      * @param value double; data value
  175.      * @throws IllegalStateException if data was added with a data stream previously
  176.      */
  177.     public synchronized void addPointDataSI(final Quantity<?, ?> quantity, final double location, final double time,
  178.             final double value)
  179.     {
  180.         this.addingByQuantity = true;
  181.         addPointDataSI(getDefaultDataStream(quantity), location, time, value);
  182.         this.addingByQuantity = false;
  183.     }

  184.     /**
  185.      * Adds point data.
  186.      * @param dataStream DataStream&lt;?&gt;; data stream of the data
  187.      * @param location double; location in [m]
  188.      * @param time double; time in [s]
  189.      * @param value double; data value
  190.      * @throws IllegalStateException if data was added with a quantity previously
  191.      */
  192.     public synchronized void addPointDataSI(final DataStream<?> dataStream, final double location, final double time,
  193.             final double value)
  194.     {
  195.         checkNoQuantityData();
  196.         Objects.requireNonNull(dataStream, "Datastream may not be null.");
  197.         if (!Double.isNaN(value))
  198.         {
  199.             getSpacioTemporalData(getSpatialData(location), time).put(dataStream, value);
  200.         }
  201.     }

  202.     /**
  203.      * Adds vector data.
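     * The three arrays are parallel: element {@code i} of each array together describes one measurement. A hedged sketch
     * (values are illustrative):
     * <pre>{@code
     * double[] x = {0.0, 500.0, 1000.0}; // locations [m]
     * double[] t = {60.0, 60.0, 90.0}; // times [s]
     * double[] v = {25.0, 20.0, 8.0}; // values, here speeds [m/s]
     * egtf.addVectorDataSI(Quantity.SPEED_SI, x, t, v);
     * }</pre>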
  204.      * @param quantity Quantity&lt;?, ?&gt;; quantity of the data
  205.      * @param location double[]; locations in [m]
  206.      * @param time double[]; times in [s]
  207.      * @param values double[]; data values in SI unit
  208.      * @throws IllegalStateException if data was added with a data stream previously
  209.      */
  210.     public synchronized void addVectorDataSI(final Quantity<?, ?> quantity, final double[] location, final double[] time,
  211.             final double[] values)
  212.     {
  213.         this.addingByQuantity = true;
  214.         addVectorDataSI(getDefaultDataStream(quantity), location, time, values);
  215.         this.addingByQuantity = false;
  216.     }

  217.     /**
  218.      * Adds vector data.
  219.      * @param dataStream DataStream&lt;?&gt;; data stream of the data
  220.      * @param location double[]; locations in [m]
  221.      * @param time double[]; times in [s]
  222.      * @param values double[]; data values in SI unit
  223.      * @throws IllegalStateException if data was added with a quantity previously
  224.      */
  225.     public synchronized void addVectorDataSI(final DataStream<?> dataStream, final double[] location, final double[] time,
  226.             final double[] values)
  227.     {
  228.         checkNoQuantityData();
  229.         Objects.requireNonNull(dataStream, "Datastream may not be null.");
  230.         Objects.requireNonNull(location, "Location may not be null.");
  231.         Objects.requireNonNull(time, "Time may not be null.");
  232.         Objects.requireNonNull(values, "Values may not be null.");
  233.         if (location.length != time.length || time.length != values.length)
  234.         {
  235.             throw new IllegalArgumentException(String.format("Unequal lengths: location %d, time %d, data %d.", location.length,
  236.                     time.length, values.length));
  237.         }
  238.         for (int i = 0; i < values.length; i++)
  239.         {
  240.             if (!Double.isNaN(values[i]))
  241.             {
  242.                 getSpacioTemporalData(getSpatialData(location[i]), time[i]).put(dataStream, values[i]);
  243.             }
  244.         }
  245.     }

  246.     /**
  247.      * Adds grid data.
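     * The values array is indexed as {@code values[location][time]}. A hedged sketch (values are illustrative):
     * <pre>{@code
     * double[] x = {0.0, 100.0, 200.0}; // 3 locations [m]
     * double[] t = {0.0, 60.0}; // 2 times [s]
     * double[][] v = new double[3][2]; // v[i][j] is the value at x[i] and t[j]
     * egtf.addGridDataSI(Quantity.SPEED_SI, x, t, v);
     * }</pre>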
  248.      * @param quantity Quantity&lt;?, ?&gt;; quantity of the data
  249.      * @param location double[]; locations in [m]
  250.      * @param time double[]; times in [s]
  251.      * @param values double[][]; data values in SI unit
  252.      * @throws IllegalStateException if data was added with a data stream previously
  253.      */
  254.     public synchronized void addGridDataSI(final Quantity<?, ?> quantity, final double[] location, final double[] time,
  255.             final double[][] values)
  256.     {
  257.         this.addingByQuantity = true;
  258.         addGridDataSI(getDefaultDataStream(quantity), location, time, values);
  259.         this.addingByQuantity = false;
  260.     }

  261.     /**
  262.      * Adds grid data.
  263.      * @param dataStream DataStream&lt;?&gt;; data stream of the data
  264.      * @param location double[]; locations in [m]
  265.      * @param time double[]; times in [s]
  266.      * @param values double[][]; data values in SI unit
  267.      * @throws IllegalStateException if data was added with a quantity previously
  268.      */
  269.     public synchronized void addGridDataSI(final DataStream<?> dataStream, final double[] location, final double[] time,
  270.             final double[][] values)
  271.     {
  272.         checkNoQuantityData();
  273.         Objects.requireNonNull(dataStream, "Datastream may not be null.");
  274.         Objects.requireNonNull(location, "Location may not be null.");
  275.         Objects.requireNonNull(time, "Time may not be null.");
  276.         Objects.requireNonNull(values, "Values may not be null.");
  277.         if (values.length != location.length)
  278.         {
  279.             throw new IllegalArgumentException(
  280.                     String.format("%d locations while length of data is %d", location.length, values.length));
  281.         }
  282.         for (int i = 0; i < location.length; i++)
  283.         {
  284.             if (values[i].length != time.length)
  285.             {
  286.                 throw new IllegalArgumentException(
  287.                         String.format("%d times while length of data is %d", time.length, values[i].length));
  288.             }
  289.             Map<Double, Map<DataStream<?>, Double>> spatialData = getSpatialData(location[i]);
  290.             for (int j = 0; j < time.length; j++)
  291.             {
  292.                 if (!Double.isNaN(values[i][j]))
  293.                 {
  294.                     getSpacioTemporalData(spatialData, time[j]).put(dataStream, values[i][j]);
  295.                 }
  296.             }
  297.         }
  298.     }

  299.     /**
  300.      * Check that no data was added using a quantity.
  301.      * @throws IllegalStateException if data was added with a quantity previously
  302.      */
  303.     private void checkNoQuantityData()
  304.     {
  305.         if (!this.addingByQuantity && this.defaultDataSource != null)
  306.         {
  307.             throw new IllegalStateException(
  308.                     "Adding data with a data stream is not allowed after data has been added with a quantity.");
  309.         }
  310.     }

  311.     /**
  312.      * Returns a default data stream and checks that no data with a data stream was added.
  313.      * @param quantity Quantity&lt;?, ?&gt;; quantity
  314.      * @return DataStream&lt;?&gt;; default data stream
  315.      * @throws IllegalStateException if data was added with a data stream previously
  316.      */
  317.     private DataStream<?> getDefaultDataStream(final Quantity<?, ?> quantity)
  318.     {
  319.         Objects.requireNonNull(quantity, "Quantity may not be null.");
  320.         if (!this.dataSources.isEmpty())
  321.         {
  322.             throw new IllegalStateException(
  323.                     "Adding data with a quantity is not allowed after data has been added with a data stream.");
  324.         }
  325.         if (this.defaultDataSource == null)
  326.         {
  327.             this.defaultDataSource = new DataSource("default");
  328.             this.defaultDataStreams = new LinkedHashMap<>();
  329.         }
  330.         return this.defaultDataStreams.computeIfAbsent(quantity,
  331.                 (key) -> this.defaultDataSource.addStreamSI(quantity, 1.0, 1.0));
  332.     }

  333.     /**
  334.      * Returns data from a specific location as a subset from all data. An empty map is returned if no such data exists.
  335.      * @param location double; location in [m]
  336.      * @return data from a specific location
  337.      */
  338.     private SortedMap<Double, Map<DataStream<?>, Double>> getSpatialData(final double location)
  339.     {
  340.         return this.data.computeIfAbsent(location, (key) -> new TreeMap<>());
  341.     }

  342.     /**
  343.      * Returns data from a specific time as a subset of data from a specific location. An empty map is returned if no such data
  344.      * exists.
  345.      * @param spatialData Map&lt;Double, Map&lt;DataStream&lt;?&gt;, Double&gt;&gt;; spatially selected data
  346.      * @param time double; time in [s]
  347.      * @return data from a specific time, from data from a specific location
  348.      */
  349.     private Map<DataStream<?>, Double> getSpacioTemporalData(final Map<Double, Map<DataStream<?>, Double>> spatialData,
  350.             final double time)
  351.     {
  352.         return spatialData.computeIfAbsent(time, (key) -> new LinkedHashMap<>());
  353.     }

  354.     // **********************
  355.     // *** KERNEL METHODS ***
  356.     // **********************

  357.     /**
  358.      * Sets a default exponential kernel with infinite range, sigma = 300 m and tau = 30 s.
  359.      */
  360.     public void setKernel()
  361.     {
  362.         setKernelSI(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, new ExpKernelShape(DEFAULT_SIGMA, DEFAULT_TAU));
  363.     }

  364.     /**
  365.      * Sets an exponential kernel with infinite range.
  366.      * @param sigma double; spatial kernel size in [m]
  367.      * @param tau double; temporal kernel size in [s]
  368.      */
  369.     public void setKernelSI(final double sigma, final double tau)
  370.     {
  371.         setKernelSI(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, sigma, tau);
  372.     }

  373.     /**
  374.      * Sets an exponential kernel with limited range.
  375.      * @param sigma double; spatial kernel size in [m]
  376.      * @param tau double; temporal kernel size in [s]
  377.      * @param xMax double; maximum spatial range in [m]
  378.      * @param tMax double; maximum temporal range in [s]
  379.      */
  380.     public void setKernelSI(final double sigma, final double tau, final double xMax, final double tMax)
  381.     {
  382.         setKernelSI(xMax, tMax, new ExpKernelShape(sigma, tau));
  383.     }

  384.     /**
  385.      * Sets a Gaussian kernel with infinite range.
  386.      * @param sigma double; spatial kernel size in [m]
  387.      * @param tau double; temporal kernel size in [s]
  388.      */
  389.     public void setGaussKernelSI(final double sigma, final double tau)
  390.     {
  391.         setGaussKernelSI(Double.POSITIVE_INFINITY, Double.POSITIVE_INFINITY, sigma, tau);
  392.     }

  393.     /**
  394.      * Sets a Gaussian kernel with limited range.
  395.      * @param sigma double; spatial kernel size in [m]
  396.      * @param tau double; temporal kernel size in [s]
  397.      * @param xMax double; maximum spatial range in [m]
  398.      * @param tMax double; maximum temporal range in [s]
  399.      */
  400.     public void setGaussKernelSI(final double sigma, final double tau, final double xMax, final double tMax)
  401.     {
  402.         setKernelSI(xMax, tMax, new GaussKernelShape(sigma, tau));
  403.     }

  404.     /**
  405.      * Sets a kernel with limited range and provided shape. The shape allows using non-exponential kernels.
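     * <p>
     * A hedged configuration sketch (values are illustrative):
     * <pre>{@code
     * egtf.setKernelSI(300.0, 30.0); // exponential shape, infinite range
     * egtf.setGaussKernelSI(300.0, 30.0, 1000.0, 120.0); // Gaussian shape, range limited to 1 km and 2 min
     * egtf.setKernelSI(1000.0, 120.0, new ExpKernelShape(300.0, 30.0)); // explicit shape instance
     * }</pre>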
  406.      * @param xMax double; maximum spatial range in [m]
  407.      * @param tMax double; maximum temporal range in [s]
  408.      * @param shape KernelShape; shape of the kernel
  409.      */
  410.     public synchronized void setKernelSI(final double xMax, final double tMax, final KernelShape shape)
  411.     {
  412.         this.kernel = new Kernel(xMax, tMax, shape);
  413.     }

  414.     /**
  415.      * Returns the wave speed in congestion.
  416.      * @return double; wave speed in congestion
  417.      */
  418.     final double getWaveSpeedCongestion()
  419.     {
  420.         return this.cCong;
  421.     }

  422.     /**
  423.      * Returns the wave speed in free flow.
  424.      * @return double; wave speed in free flow
  425.      */
  426.     final double getWaveSpeedFreeFlow()
  427.     {
  428.         return this.cFree;
  429.     }

  430.     // **********************
  431.     // *** FILTER METHODS ***
  432.     // **********************

  433.     /**
  434.      * Executes filtering in parallel. The returned listener can be used to track progress and to wait until the filtering
  435.      * is done, after which the filtering results can be obtained from the listener.
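     * <p>
     * A hedged usage sketch; how progress is tracked and results are read depends on the {@code EgtfParallelListener} API and
     * is only indicated in comments (locationGrid and timeGrid are assumed output grid arrays):
     * <pre>{@code
     * EgtfParallelListener listener = egtf.filterParallelSI(locationGrid, timeGrid, Quantity.SPEED_SI);
     * // ... wait on the listener until filtering has completed, then obtain the Filter from it
     * }</pre>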
  436.      * @param location double[]; location of output grid in [m]
  437.      * @param time double[]; time of output grid in [s]
  438.      * @param quantities Quantity&lt;?, ?&gt;...; quantities to calculate filtered data of
  439.      * @return EgtfParallelListener; listener to keep track of the progress
  440.      */
  441.     public EgtfParallelListener filterParallelSI(final double[] location, final double[] time,
  442.             final Quantity<?, ?>... quantities)
  443.     {
  444.         Objects.requireNonNull(location, "Location may not be null.");
  445.         Objects.requireNonNull(time, "Time may not be null.");
  446.         EgtfParallelListener listener = new EgtfParallelListener();
  447.         addListener(listener);
  448.         new Thread(new Runnable()
  449.         {
  450.             /** {@inheritDoc} */
  451.             @Override
  452.             public void run()
  453.             {
  454.                 listener.setFilter(filterSI(location, time, quantities));
  455.                 removeListener(listener);
  456.             }
  457.         }, "Egtf calculation thread").start();
  458.         return listener;
  459.     }

  460.     /**
  461.      * Executes fast filtering in parallel. The returned listener can be used to track progress and to wait until the
  462.      * filtering is done, after which the filtering results can be obtained from the listener.
  463.      * @param xMin double; minimum location value of output grid [m]
  464.      * @param xStep double; location step of output grid [m]
  465.      * @param xMax double; maximum location value of output grid [m]
  466.      * @param tMin double; minimum time value of output grid [s]
  467.      * @param tStep double; time step of output grid [s]
  468.      * @param tMax double; maximum time value of output grid [s]
  469.      * @param quantities Quantity&lt;?, ?&gt;...; quantities to calculate filtered data of
  470.      * @return EgtfParallelListener; listener to keep track of the progress
  471.      */
  472.     public EgtfParallelListener filterParallelFastSI(final double xMin, final double xStep, final double xMax,
  473.             final double tMin, final double tStep, final double tMax, final Quantity<?, ?>... quantities)
  474.     {
  475.         EgtfParallelListener listener = new EgtfParallelListener();
  476.         addListener(listener);
  477.         new Thread(new Runnable()
  478.         {
  479.             /** {@inheritDoc} */
  480.             @Override
  481.             public void run()
  482.             {
  483.                 listener.setFilter(filterFastSI(xMin, xStep, xMax, tMin, tStep, tMax, quantities));
  484.                 removeListener(listener);
  485.             }
  486.         }, "Egtf calculation thread").start();
  487.         return listener;
  488.     }

  489.     /**
  490.      * Returns filtered data. This is the standard EGTF implementation.
  491.      * @param location double[]; location of output grid in [m]
  492.      * @param time double[]; time of output grid in [s]
  493.      * @param quantities Quantity&lt;?, ?&gt;...; quantities to calculate filtered data of
  494.      * @return Filter; filtered data, {@code null} when interrupted
  495.      */
  496.     @SuppressWarnings({"synthetic-access", "methodlength"})
  497.     public Filter filterSI(final double[] location, final double[] time, final Quantity<?, ?>... quantities)
  498.     {
  499.         Objects.requireNonNull(location, "Location may not be null.");
  500.         Objects.requireNonNull(time, "Time may not be null.");

  501.         // initialize data
  502.         Map<Quantity<?, ?>, double[][]> map = new LinkedHashMap<>();
  503.         for (Quantity<?, ?> quantity : quantities)
  504.         {
  505.             map.put(quantity, new double[location.length][time.length]);
  506.         }

  507.         // loop grid locations
  508.         for (int i = 0; i < location.length; i++)
  509.         {
  510.             double xGrid = location[i];

  511.             // filter applicable data for location
  512.             Map<Double, NavigableMap<Double, Map<DataStream<?>, Double>>> spatialData =
  513.                     this.data.subMap(this.kernel.fromLocation(xGrid), true, this.kernel.toLocation(xGrid), true);

  514.             // loop grid times
  515.             for (int j = 0; j < time.length; j++)
  516.             {
  517.                 double tGrid = time[j];

  518.                 // notify
  519.                 if (notifyListeners((i + (double) j / time.length) / location.length))
  520.                 {
  521.                     return null;
  522.                 }

  523.                 // initialize data per stream
  524.                 // quantity z assuming congestion and free flow
  525.                 Map<DataStream<?>, DualWeightedMean> zCongFree = new LinkedHashMap<>();

  526.                 // filter and loop applicable data for time
  527.                 for (Map.Entry<Double, NavigableMap<Double, Map<DataStream<?>, Double>>> xEntry : spatialData.entrySet())
  528.                 {
  529.                     double dx = xEntry.getKey() - xGrid;
  530.                     Map<Double, Map<DataStream<?>, Double>> temporalData =
  531.                             xEntry.getValue().subMap(this.kernel.fromTime(tGrid), true, this.kernel.toTime(tGrid), true);

  532.                     for (Map.Entry<Double, Map<DataStream<?>, Double>> tEntry : temporalData.entrySet())
  533.                     {
  534.                         double dt = tEntry.getKey() - tGrid;
  535.                         Map<DataStream<?>, Double> pData = tEntry.getValue();

  536.                         double phiCong = this.kernel.weight(this.cCong, dx, dt);
  537.                         double phiFree = this.kernel.weight(this.cFree, dx, dt);

  538.                         // loop streams data at point
  539.                         for (Map.Entry<DataStream<?>, Double> vEntry : pData.entrySet())
  540.                         {
  541.                             DataStream<?> stream = vEntry.getKey();
  542.                             if (map.containsKey(stream.getQuantity()) || stream.getQuantity().isSpeed())
  543.                             {
  544.                                 double v = vEntry.getValue();
  545.                                 DualWeightedMean zCongFreeOfStream =
  546.                                         zCongFree.computeIfAbsent(stream, (key) -> new DualWeightedMean());
  547.                                 zCongFreeOfStream.addCong(v, phiCong);
  548.                                 zCongFreeOfStream.addFree(v, phiFree);
  549.                             }
  550.                         }
  551.                     }
  552.                 }

  553.                 // figure out the congestion level estimated for each data source
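                // w = 0.5 * (1 + tanh((vc - u) / deltaV)) maps the estimated speed u to a congestion level in (0, 1):
                // close to 1 when u is well below vc (congestion), close to 0 when u is well above vc (free flow)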
  554.                 Map<DataSource, Double> w = new LinkedHashMap<>();
  555.                 for (Map.Entry<DataStream<?>, DualWeightedMean> streamEntry : zCongFree.entrySet())
  556.                 {
  557.                     DataStream<?> dataStream = streamEntry.getKey();
  558.                     if (dataStream.getQuantity().isSpeed()) // only one speed quantity allowed per data source
  559.                     {
  560.                         DualWeightedMean zCongFreeOfStream = streamEntry.getValue();
  561.                         double u = Math.min(zCongFreeOfStream.getCong(), zCongFreeOfStream.getFree());
  562.                         w.put(dataStream.getDataSource(), // 1 speed quantity per source allowed
  563.                                 .5 * (1.0 + Math.tanh((EGTF.this.vc - u) / EGTF.this.deltaV)));
  564.                         continue;
  565.                     }
  566.                 }

  567.                 // sum available data sources per quantity
  568.                 Double wMean = null;
  569.                 for (Map.Entry<Quantity<?, ?>, double[][]> qEntry : map.entrySet())
  570.                 {
  571.                     Quantity<?, ?> quantity = qEntry.getKey();
  572.                     WeightedMean z = new WeightedMean();
  573.                     for (Map.Entry<DataStream<?>, DualWeightedMean> zEntry : zCongFree.entrySet())
  574.                     {
  575.                         DataStream<?> dataStream = zEntry.getKey();
  576.                         if (dataStream.getQuantity().equals(quantity))
  577.                         {
  578.                             // obtain congestion level
  579.                             double wCong;
  580.                             if (!w.containsKey(dataStream.getDataSource()))
  581.                             {
  582.                                 // this data source has no speed data, but congestion level can be estimated from other sources
  583.                                 if (wMean == null)
  584.                                 {
  585.                                     // let's see if speed was estimated already
  586.                                     for (Quantity<?, ?> prevQuant : quantities)
  587.                                     {
  588.                                         if (prevQuant.equals(quantity))
  589.                                         {
  590.                                             // speed was not estimated before this quantity; use the mean congestion level of the other data sources
  591.                                             wMean = 0.0;
  592.                                             for (double ww : w.values())
  593.                                             {
  594.                                                 wMean += ww / w.size();
  595.                                             }
  596.                                             break;
  597.                                         }
  598.                                         else if (prevQuant.isSpeed())
  599.                                         {
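                                            // a speed quantity was estimated earlier in the quantity loop; derive the
                                            // congestion level from the estimated speed at this grid point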
  600.                                             wMean = .5 * (1.0
  601.                                                     + Math.tanh((EGTF.this.vc - map.get(prevQuant)[i][j]) / EGTF.this.deltaV));
  602.                                             break;
  603.                                         }
  604.                                     }
  605.                                 }
  606.                                 wCong = wMean;
  607.                             }
  608.                             else
  609.                             {
  610.                                 wCong = w.get(dataStream.getDataSource());
  611.                             }
  612.                             // calculate estimated value z of this data source (no duplicate quantities per source allowed)
  613.                             double wfree = 1.0 - wCong;
  614.                             DualWeightedMean zCongFreej = zEntry.getValue();
  615.                             double zStream = wCong * zCongFreej.getCong() + wfree * zCongFreej.getFree();
  616.                             double weight;
  617.                             if (w.size() > 1)
  618.                             {
  619.                                 // data source more important if more and nearer measurements
  620.                                 double beta = wCong * zCongFreej.getDenominatorCong() + wfree * zCongFreej.getDenominatorFree();
  621.                                 // more important if more reliable (smaller standard deviation) at congestion level
  622.                                 double alpha = wCong / dataStream.getThetaCong() + wfree / dataStream.getThetaFree();
  623.                                 weight = alpha * beta;
  624.                             }
  625.                             else
  626.                             {
  627.                                 weight = 1.0;
  628.                             }
  629.                             z.add(zStream, weight);
  630.                         }
  631.                     }
  632.                     qEntry.getValue()[i][j] = z.get();
  633.                 }
  634.             }
  635.         }
  636.         notifyListeners(1.0);

  637.         return new FilterDouble(location, time, map);
  638.     }

  639.     /**
  640.      * Returns filtered data that is processed using fast Fourier transformation. This is much faster than the standard
  641.      * filter, at the cost that all input data is discretized to the output grid. However, the gain in computation speed is
  642.      * such that finer output grids can be used to alleviate this. For discretization the output grid needs to be equidistant.
  643.      * It is recommended to set a Kernel with maximum bounds before using this method.
  644.      * <p>
  645.      * More than being a fast implementation of the Adaptive Smoothing Method, this implementation includes all data sources,
  646.      * as in the Extended Generalized Treiber-Helbing Filter.
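     * <p>
     * A hedged usage sketch (grid values are illustrative assumptions):
     * <pre>{@code
     * // 5 km stretch at 100 m resolution, 1 hour at 30 s resolution
     * egtf.setKernelSI(300.0, 30.0, 1000.0, 120.0); // bounded kernel recommended for the fast filter
     * Filter filtered = egtf.filterFastSI(0.0, 100.0, 5000.0, 0.0, 30.0, 3600.0, Quantity.SPEED_SI);
     * }</pre>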
  647.      * @param xMin double; minimum location value of output grid [m]
  648.      * @param xStep double; location step of output grid [m]
  649.      * @param xMax double; maximum location value of output grid [m]
  650.      * @param tMin double; minimum time value of output grid [s]
  651.      * @param tStep double; time step of output grid [s]
  652.      * @param tMax double; maximum time value of output grid [s]
  653.      * @param quantities Quantity&lt;?, ?&gt;...; quantities to calculate filtered data of
  654.      * @return Filter; filtered data, {@code null} when interrupted
  655.      */
  656.     @SuppressWarnings("methodlength")
  657.     public Filter filterFastSI(final double xMin, final double xStep, final double xMax, final double tMin, final double tStep,
  658.             final double tMax, final Quantity<?, ?>... quantities)
  659.     {
  660.         if (xMin > xMax || xStep <= 0.0 || tMin > tMax || tStep <= 0.0)
  661.         {
  662.             throw new IllegalArgumentException(
  663.                     "Ill-defined grid. Make sure that xMax >= xMin, dx > 0, tMax >= tMin and dt > 0");
  664.         }
  665.         if (notifyListeners(0.0))
  666.         {
  667.             return null;
  668.         }

  669.         // initialize data
  670.         int n = 1 + (int) ((xMax - xMin) / xStep);
  671.         double[] location = new double[n];
  672.         IntStream.range(0, n).forEach(i -> location[i] = xMin + i * xStep);
  673.         n = 1 + (int) ((tMax - tMin) / tStep);
  674.         double[] time = new double[n];
  675.         IntStream.range(0, n).forEach(j -> time[j] = tMin + j * tStep);
  676.         Map<Quantity<?, ?>, double[][]> map = new LinkedHashMap<>();
  677.         Map<Quantity<?, ?>, double[][]> weights = new LinkedHashMap<>();
  678.         for (Quantity<?, ?> quantity : quantities)
  679.         {
  680.             map.put(quantity, new double[location.length][time.length]);
  681.             weights.put(quantity, new double[location.length][time.length]);
  682.         }

  683.         // discretize Kernel
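        // an infinite kernel range is capped at twice the grid span, which covers any offset between two grid points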
  684.         double xFrom = this.kernel.fromLocation(0.0);
  685.         xFrom = Double.isInfinite(xFrom) ? 2.0 * (xMin - xMax) : xFrom;
  686.         double xTo = this.kernel.toLocation(0.0);
  687.         xTo = Double.isInfinite(xTo) ? 2.0 * (xMax - xMin) : xTo;
  688.         double[] dx = equidistant(xFrom, xStep, xTo);
  689.         double tFrom = this.kernel.fromTime(0.0);
  690.         tFrom = Double.isInfinite(tFrom) ? 2.0 * (tMin - tMax) : tFrom;
  691.         double tTo = this.kernel.toTime(0.0);
  692.         tTo = Double.isInfinite(tTo) ? 2.0 * (tMax - tMin) : tTo;
  693.         double[] dt = equidistant(tFrom, tStep, tTo);
  694.         double[][] phiCong = new double[dx.length][dt.length];
  695.         double[][] phiFree = new double[dx.length][dt.length];
  696.         for (int i = 0; i < dx.length; i++)
  697.         {
  698.             for (int j = 0; j < dt.length; j++)
  699.             {
  700.                 phiCong[i][j] = this.kernel.weight(this.cCong, dx[i], dt[j]);
  701.                 phiFree[i][j] = this.kernel.weight(this.cFree, dx[i], dt[j]);
  702.             }
  703.         }

  704.         // discretize data
  705.         Map<DataStream<?>, double[][]> dataSum = new LinkedHashMap<>();
  706.         Map<DataStream<?>, double[][]> dataCount = new LinkedHashMap<>(); // integer counts, must be double[][] for convolution
  707.         // loop grid locations
  708.         for (int i = 0; i < location.length; i++)
  709.         {
  710.             // filter applicable data for location
  711.             Map<Double, NavigableMap<Double, Map<DataStream<?>, Double>>> spatialData =
  712.                     this.data.subMap(location[i] - 0.5 * xStep, true, location[i] + 0.5 * xStep, true);
  713.             // loop grid times
  714.             for (int j = 0; j < time.length; j++)
  715.             {
  716.                 // filter and loop applicable data for time
  717.                 for (NavigableMap<Double, Map<DataStream<?>, Double>> locationData : spatialData.values())
  718.                 {
  719.                     NavigableMap<Double, Map<DataStream<?>, Double>> temporalData =
  720.                             locationData.subMap(time[j] - 0.5 * tStep, true, time[j] + 0.5 * tStep, true);
  721.                     for (Map<DataStream<?>, Double> timeData : temporalData.values())
  722.                     {
  723.                         for (Map.Entry<DataStream<?>, Double> timeEntry : timeData.entrySet())
  724.                         {
  725.                             if (map.containsKey(timeEntry.getKey().getQuantity()) || timeEntry.getKey().getQuantity().isSpeed())
  726.                             {
  727.                                 dataSum.computeIfAbsent(timeEntry.getKey(),
  728.                                         (key) -> new double[location.length][time.length])[i][j] += timeEntry.getValue();
  729.                                 dataCount.computeIfAbsent(timeEntry.getKey(),
  730.                                         (key) -> new double[location.length][time.length])[i][j]++;
  731.                             }
  732.                         }
  733.                     }
  734.                 }
  735.             }
  736.         }

  737.         // figure out the congestion level estimated for each data source
  738.         double steps = quantities.length + 1; // speed (for congestion level) and then all in quantities
  739.         double step = 0;
  740.         // store maps to prevent us from calculating the convolution for speed again later
  741.         Map<DataSource, double[][]> w = new LinkedHashMap<>();
  742.         Map<DataSource, double[][]> zCongSpeed = new LinkedHashMap<>();
  743.         Map<DataSource, double[][]> zFreeSpeed = new LinkedHashMap<>();
  744.         Map<DataSource, double[][]> nCongSpeed = new LinkedHashMap<>();
  745.         Map<DataSource, double[][]> nFreeSpeed = new LinkedHashMap<>();
  746.         for (Map.Entry<DataStream<?>, double[][]> zEntry : dataSum.entrySet())
  747.         {
  748.             DataStream<?> dataStream = zEntry.getKey();
  749.             if (dataStream.getQuantity().isSpeed()) // only one speed quantity allowed per data source
  750.             {
  751.                 // notify
  752.                 double[][] vCong = Convolution.convolution(phiCong, zEntry.getValue());
  753.                 if (notifyListeners((step + 0.25) / steps))
  754.                 {
  755.                     return null;
  756.                 }
  757.                 double[][] vFree = Convolution.convolution(phiFree, zEntry.getValue());
  758.                 if (notifyListeners((step + 0.5) / steps))
  759.                 {
  760.                     return null;
  761.                 }
  762.                 double[][] count = dataCount.get(dataStream);
  763.                 double[][] nCong = Convolution.convolution(phiCong, count);
  764.                 if (notifyListeners((step + 0.75) / steps))
  765.                 {
  766.                     return null;
  767.                 }
  768.                 double[][] nFree = Convolution.convolution(phiFree, count);
  769.                 double[][] wSource = new double[vCong.length][vCong[0].length];
  770.                 for (int i = 0; i < vCong.length; i++)
  771.                 {
  772.                     for (int j = 0; j < vCong[0].length; j++)
  773.                     {
  774.                         double u = Math.min(vCong[i][j] / nCong[i][j], vFree[i][j] / nFree[i][j]);
  775.                         wSource[i][j] = .5 * (1.0 + Math.tanh((EGTF.this.vc - u) / EGTF.this.deltaV));
  776.                     }
  777.                 }
  778.                 w.put(dataStream.getDataSource(), wSource);
  779.                 zCongSpeed.put(dataStream.getDataSource(), vCong);
  780.                 zFreeSpeed.put(dataStream.getDataSource(), vFree);
  781.                 nCongSpeed.put(dataStream.getDataSource(), nCong);
  782.                 nFreeSpeed.put(dataStream.getDataSource(), nFree);
  783.             }
  784.         }
  785.         step++;
  786.         if (notifyListeners(step / steps))
  787.         {
  788.             return null;
  789.         }

  790.         // sum available data sources per quantity
  791.         double[][] wMean = null;
  792.         for (Quantity<?, ?> quantity : quantities)
  793.         {
  794.             // arrays to gather results and weights for this quantity
  795.             double[][] qData = map.get(quantity);
  796.             double[][] qWeights = weights.get(quantity);
  797.             // loop streams that provide this quantity
  798.             Set<Map.Entry<DataStream<?>, double[][]>> zEntries = new LinkedHashSet<>();
  799.             for (Map.Entry<DataStream<?>, double[][]> zEntry : dataSum.entrySet())
  800.             {
  801.                 if (zEntry.getKey().getQuantity().equals(quantity))
  802.                 {
  803.                     zEntries.add(zEntry);
  804.                 }
  805.             }
  806.             double streamCounter = 0;
  807.             for (Map.Entry<DataStream<?>, double[][]> zEntry : zEntries)
  808.             {
  809.                 DataStream<?> dataStream = zEntry.getKey();

  810.                 // obtain congestion level
  811.                 double[][] wj;
  812.                 if (!w.containsKey(dataStream.getDataSource()))
  813.                 {
  814.                     // this data source has no speed data, but congestion level can be estimated from other sources
  815.                     if (wMean == null)
  816.                     {
  817.                         // let's see if speed was estimated already
  818.                         for (Quantity<?, ?> prevQuant : quantities)
  819.                         {
  820.                             if (prevQuant.equals(quantity))
  821.                             {
  822.                                 // speed was not estimated before this quantity; use the mean congestion level of the other data sources
  823.                                 wMean = new double[location.length][time.length];
  824.                                 for (double[][] ww : w.values())
  825.                                 {
  826.                                     for (int i = 0; i < location.length; i++)
  827.                                     {
  828.                                         for (int j = 0; j < time.length; j++)
  829.                                         {
  830.                                             wMean[i][j] += ww[i][j] / w.size();
  831.                                         }
  832.                                     }
  833.                                 }
  834.                                 break;
  835.                             }
  836.                             else if (prevQuant.isSpeed())
  837.                             {
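                                // a speed quantity was estimated earlier in the quantity loop; derive the congestion level
                                // from the estimated speed grid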
  838.                                 wMean = new double[location.length][time.length];
  839.                                 double[][] v = map.get(prevQuant);
  840.                                 for (int i = 0; i < location.length; i++)
  841.                                 {
  842.                                     for (int j = 0; j < time.length; j++)
  843.                                     {
  844.                                         wMean[i][j] = .5 * (1.0 + Math.tanh((EGTF.this.vc - v[i][j]) / EGTF.this.deltaV));
  845.                                     }
  846.                                 }
  847.                                 break;
  848.                             }
  849.                         }
  850.                     }
  851.                     wj = wMean;
  852.                 }
  853.                 else
  854.                 {
  855.                     wj = w.get(dataStream.getDataSource());
  856.                 }

  857.                 // convolutions of filters with discretized data and data counts
  858.                 double[][] zCong;
  859.                 double[][] zFree;
  860.                 double[][] nCong;
  861.                 double[][] nFree;
  862.                 if (dataStream.getQuantity().isSpeed())
  863.                 {
  864.                     zCong = zCongSpeed.get(dataStream.getDataSource());
  865.                     zFree = zFreeSpeed.get(dataStream.getDataSource());
  866.                     nCong = nCongSpeed.get(dataStream.getDataSource());
  867.                     nFree = nFreeSpeed.get(dataStream.getDataSource());
  868.                 }
  869.                 else
  870.                 {
  871.                     zCong = Convolution.convolution(phiCong, zEntry.getValue());
  872.                     if (notifyListeners((step + (streamCounter + 0.25) / zEntries.size()) / steps))
  873.                     {
  874.                         return null;
  875.                     }
  876.                     zFree = Convolution.convolution(phiFree, zEntry.getValue());
  877.                     if (notifyListeners((step + (streamCounter + 0.5) / zEntries.size()) / steps))
  878.                     {
  879.                         return null;
  880.                     }
  881.                     double[][] count = dataCount.get(dataStream);
  882.                     nCong = Convolution.convolution(phiCong, count);
  883.                     if (notifyListeners((step + (streamCounter + 0.75) / zEntries.size()) / steps))
  884.                     {
  885.                         return null;
  886.                     }
  887.                     nFree = Convolution.convolution(phiFree, count);
  888.                 }

  889.                 // loop grid to add to each weighted sum (weighted per data source)
  890.                 for (int i = 0; i < location.length; i++)
  891.                 {
  892.                     for (int j = 0; j < time.length; j++)
  893.                     {
  894.                         double wCong = wj[i][j];
  895.                         double wFree = 1.0 - wCong;
  896.                         double value = wCong * zCong[i][j] / nCong[i][j] + wFree * zFree[i][j] / nFree[i][j];
  897.                         // the fast filter supplies convolved data counts, i.e. amount of data and filter proximity; this
  898.                         // is exactly what the EGTF method needs to weigh data sources
  899.                         double beta = wCong * nCong[i][j] + wFree * nFree[i][j];
  900.                         double alpha = wCong / dataStream.getThetaCong() + wFree / dataStream.getThetaFree();
  901.                         double weight = beta * alpha;
  902.                         qData[i][j] += (value * weight);
  903.                         qWeights[i][j] += weight;
  904.                     }
  905.                 }
  906.                 streamCounter++;
  907.                 if (notifyListeners((step + streamCounter / zEntries.size()) / steps))
  908.                 {
  909.                     return null;
  910.                 }
  911.             }
  912.             for (int i = 0; i < location.length; i++)
  913.             {
  914.                 for (int j = 0; j < time.length; j++)
  915.                 {
  916.                     qData[i][j] /= qWeights[i][j];
  917.                 }
  918.             }
  919.             step++;
  920.         }

  921.         return new FilterDouble(location, time, map);
  922.     }

  923.     /**
  924.      * Returns an equidistant vector that includes 0.
  925.      * @param from double; lowest value to include
  926.      * @param step double; step
  927.      * @param to double; highest value to include
  928.      * @return double[]; equidistant vector that includes 0
  929.      */
  930.     private double[] equidistant(final double from, final double step, final double to)
  931.     {
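        // e.g. equidistant(-250.0, 100.0, 250.0) returns {-200.0, -100.0, 0.0, 100.0, 200.0} (truncation towards zero)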
  932.         int n1 = (int) (-from / step);
  933.         int n2 = (int) (to / step);
  934.         int n = n1 + n2 + 1;
  935.         double[] array = new double[n];
  936.         for (int i = 0; i < n; i++)
  937.         {
  938.             array[i] = i < n1 ? step * (-n1 + i) : step * (i - n1);
  939.         }
  940.         return array;
  941.     }

  942.     // *********************
  943.     // *** EVENT METHODS ***
  944.     // *********************

  945.     /**
  946.      * Interrupt the calculation.
  947.      */
  948.     public final void interrupt()
  949.     {
  950.         this.interrupted = true;
  951.     }

  952.     /**
  953.      * Add listener.
  954.      * @param listener EgtfListener; listener
  955.      */
  956.     public final void addListener(final EgtfListener listener)
  957.     {
  958.         this.listeners.add(listener);
  959.     }

  960.     /**
  961.      * Remove listener.
  962.      * @param listener EgtfListener; listener
  963.      */
  964.     public final void removeListener(final EgtfListener listener)
  965.     {
  966.         this.listeners.remove(listener);
  967.     }

  968.     /**
  969.      * Notify all listeners.
  970.      * @param progress double; progress, a value in the range [0 ... 1]
  971.      * @return boolean; whether the filter is interrupted
  972.      */
  973.     private boolean notifyListeners(final double progress)
  974.     {
  975.         if (!this.listeners.isEmpty())
  976.         {
  977.             EgtfEvent event = new EgtfEvent(this, progress);
  978.             for (EgtfListener listener : this.listeners)
  979.             {
  980.                 listener.notifyProgress(event);
  981.             }
  982.         }
  983.         return this.interrupted;
  984.     }

  985.     // **********************
  986.     // *** HELPER CLASSES ***
  987.     // **********************

  988.     /**
  989.      * Small class to build up a weighted mean under both the congestion and the free flow assumption.
  990.      */
  991.     private class DualWeightedMean
  992.     {
  993.         /** Cumulative congestion numerator of weighted mean fraction, i.e. weighted sum. */
  994.         private double numeratorCong;

  995.         /** Cumulative free flow numerator of weighted mean fraction, i.e. weighted sum. */
  996.         private double numeratorFree;

  997.         /** Cumulative congestion denominator of weighted mean fraction, i.e. sum of weights. */
  998.         private double denominatorCong;

  999.         /** Cumulative free flow denominator of weighted mean fraction, i.e. sum of weights. */
  1000.         private double denominatorFree;

  1001.         /**
  1002.          * Adds a congestion value with weight.
  1003.          * @param value double; value
  1004.          * @param weight double; weight
  1005.          */
  1006.         public void addCong(final double value, final double weight)
  1007.         {
  1008.             this.numeratorCong += value * weight;
  1009.             this.denominatorCong += weight;
  1010.         }

  1011.         /**
  1012.          * Adds a free flow value with weight.
  1013.          * @param value double; value
  1014.          * @param weight double; weight
  1015.          */
  1016.         public void addFree(final double value, final double weight)
  1017.         {
  1018.             this.numeratorFree += value * weight;
  1019.             this.denominatorFree += weight;
  1020.         }

  1021.         /**
  1022.          * Returns the weighted congestion mean of available data.
  1023.          * @return double; weighted mean of available data
  1024.          */
  1025.         public double getCong()
  1026.         {
  1027.             return this.numeratorCong / this.denominatorCong;
  1028.         }

  1029.         /**
  1030.          * Returns the weighted free flow mean of available data.
  1031.          * @return double; weighted free flow mean of available data
  1032.          */
  1033.         public double getFree()
  1034.         {
  1035.             return this.numeratorFree / this.denominatorFree;
  1036.         }

  1037.         /**
  1038.          * Returns the sum of congestion weights.
  1039.          * @return double; the sum of congestion weights
  1040.          */
  1041.         public double getDenominatorCong()
  1042.         {
  1043.             return this.denominatorCong;
  1044.         }

  1045.         /**
  1046.          * Returns the sum of free flow weights.
  1047.          * @return double; the sum of free flow weights
  1048.          */
  1049.         public double getDenominatorFree()
  1050.         {
  1051.             return this.denominatorFree;
  1052.         }

  1053.         /** {@inheritDoc} */
  1054.         @Override
  1055.         public String toString()
  1056.         {
  1057.             return "DualWeightedMean [numeratorCong=" + this.numeratorCong + ", numeratorFree=" + this.numeratorFree
  1058.                     + ", denominatorCong=" + this.denominatorCong + ", denominatorFree=" + this.denominatorFree + "]";
  1059.         }

  1060.     }

  1061.     /**
  1062.      * Small class to build up a weighted mean.
  1063.      */
  1064.     private class WeightedMean
  1065.     {
  1066.         /** Cumulative numerator of weighted mean fraction, i.e. weighted sum. */
  1067.         private double numerator;

  1068.         /** Cumulative denominator of weighted mean fraction, i.e. sum of weights. */
  1069.         private double denominator;

  1070.         /**
  1071.          * Adds a value with weight.
  1072.          * @param value double; value
  1073.          * @param weight double; weight
  1074.          */
  1075.         public void add(final double value, final double weight)
  1076.         {
  1077.             this.numerator += value * weight;
  1078.             this.denominator += weight;
  1079.         }

  1080.         /**
  1081.          * Returns the weighted mean of available data.
  1082.          * @return double; weighted mean of available data
  1083.          */
  1084.         public double get()
  1085.         {
  1086.             return this.numerator / this.denominator;
  1087.         }

  1088.         /** {@inheritDoc} */
  1089.         @Override
  1090.         public String toString()
  1091.         {
  1092.             return "WeightedMean [numerator=" + this.numerator + ", denominator=" + this.denominator + "]";
  1093.         }

  1094.     }

  1095.     /** {@inheritDoc} */
  1096.     @Override
  1097.     public String toString()
  1098.     {
  1099.         return "EGTF [kernel=" + this.kernel + ", cCong=" + this.cCong + ", cFree=" + this.cFree + ", deltaV=" + this.deltaV
  1100.                 + ", vc=" + this.vc + ", dataSources=" + this.dataSources + ", data=" + this.data + ", interrupted="
  1101.                 + this.interrupted + ", listeners=" + this.listeners + "]";
  1102.     }

  1103. }