FFSM++  1.1.0
French Forest Sector Model ++
ModelCoreSpatial.cpp
Go to the documentation of this file.
1 /***************************************************************************
2  * Copyright (C) 2015 by Laboratoire d'Economie Forestière *
3  * http://ffsm-project.org *
4  * *
5  * This program is free software; you can redistribute it and/or modify *
6  * it under the terms of the GNU General Public License as published by *
7  * the Free Software Foundation; either version 3 of the License, or *
8  * (at your option) any later version, given the compliance with the *
9  * exceptions listed in the file COPYING that is distributed together *
10  * with this file. *
11  * *
12  * This program is distributed in the hope that it will be useful, *
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
15  * GNU General Public License for more details. *
16  * *
17  * You should have received a copy of the GNU General Public License *
18  * along with this program; if not, write to the *
19  * Free Software Foundation, Inc., *
20  * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
21  ***************************************************************************/
22 #include <cmath>
23 #include <algorithm>
24 #include <tr1/array>
25 
26 
27 #include "IpIpoptApplication.hpp"
28 #include "IpSolveStatistics.hpp"
29 
30 #include "ModelCoreSpatial.h"
31 #include "ModelData.h"
32 #include "ThreadManager.h"
33 #include "Opt.h"
34 #include "Scheduler.h"
35 #include "Gis.h"
36 #include "Carbon.h"
37 
38 
// NOTE(review): constructor body fragment — the signature line (doxygen listing
// line 39, presumably the ModelCoreSpatial constructor taking a ThreadManager*)
// was dropped by the HTML-to-text extraction; restore it from the original
// ModelCoreSpatial.cpp. Stores the thread-manager handle for later use.
40  MTHREAD = MTHREAD_h;
41 }
42 
// NOTE(review): empty destructor body — the signature line (doxygen listing
// line 43, presumably the ModelCoreSpatial destructor) was dropped by the
// HTML-to-text extraction; restore it from the original ModelCoreSpatial.cpp.
44 
45 }
46 
47 void
// NOTE(review): doxygen listing dump — the function-signature line (listing
// line 48, presumably "ModelCoreSpatial::runInitPeriod(){") and the hyperlinked
// call lines 54, 65-66 and 70 were dropped by the extraction; restore them from
// the original ModelCoreSpatial.cpp before compiling. The leading numbers on
// each line are doxygen listing numbers, not code.
// Purpose (from the visible calls): one-off initialisation of the simulation —
// cache settings, compute base-year pixel volumes/areas, run the initial market
// equilibrium, advance the scheduler to the first simulated year, refresh the
// derived pixel/map/regional data and print the initial output.
 49  cacheSettings(); ///< cache things like first year, second year, dClasses...
 50  cacheDynamicSettings(); // cache settings that may change every year
 51  initializePixelVolumes(); ///< compute px volumes vol for 2005 (including exogenous loaded volumes)
 52  assignSpMultiplierPropToVols(); // assign the spatial multiplier (used in the time of return) based no more on a Normal distribution but on the volumes present in the pixel: more volume, more the pixel is fit for the ft
 53  initMarketModule(); ///< inside it uses first year, second year
 55  MTHREAD->DO->print(true);
 56  MTHREAD->SCD->advanceYear(); ///< 2005->2006
 57  int thisYear = MTHREAD->SCD->getYear(); // for debugging
 58  msgOut(MSG_INFO, "### 2013 "+i2s(thisYear)+" year started..");
 59  resetPixelValues(); ///< swap volumes->lagged_volumes and reset the other pixel vectors
 60  cachePixelExogenousData(); ///< compute pixel tp, meta and mort
 61  computeInventary(); ///< in=f(vol_t-1)
 62  //printDebugInitRegionalValues();
 63  computeCumulativeData(); ///< compute cumTp_exp, vHa_exp, vHa
 64  initializePixelArea(); ///< compute px->area for each ft and dc (including exogenous loaded areas)
 67  updateMapAreas(); ///< update the forArea_{ft} layer on each pixel as old value-hArea+regArea
 68  updateOtherMapData(); ///< update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
 69  sumRegionalForData(); ///< only for printing stats as forest data is never used at regional level
 71  //MTHREAD->DO->printDebugPixelValues(); // uncomment to enable pixel-level debugging // moved to output->print
 72 
 73  MTHREAD->DO->print();
 74 }
75 
76 void
// NOTE(review): doxygen listing dump — the signature line (listing line 77,
// presumably "ModelCoreSpatial::runSimulationYear(){") and the hyperlinked call
// lines 85-87, 89 and 92 were dropped by the extraction; restore them from the
// original ModelCoreSpatial.cpp before compiling.
// Purpose (from the visible calls): the per-year simulation step — refresh
// dynamic settings and pixel data, solve the yearly market equilibrium, update
// derived/regional data and the economic balances, then print the year output.
 78  int thisYear = MTHREAD->SCD->getYear(); // for debugging
 79  cacheDynamicSettings(); // cache settings that may change every year
 80  resetPixelValues(); // swap volumes->lagged_volumes and reset the other pixel vectors
 81  cachePixelExogenousData(); // compute pixel tp, meta and mort
 82  computeInventary(); // in=f(vol_t-1)
 83  runMarketModule(); // RUN THE MARKET OPTIMISATION HERE
 84  computeCumulativeData(); // compute cumTp_exp, vHa_exp
 88  //MTHREAD->DO->printDebugPixelValues(); // uncomment to enable pixel-level debugging // moved to output->print
 90  updateOtherMapData(); // update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
 91  sumRegionalForData(); // only for printing stats as forest data is never used at regional level
 93  computeEconomicBalances(); // policy costs and welfare analysis
 94  MTHREAD->DO->print(false);
 95 }
96 
97 void
// NOTE(review): doxygen listing dump — the signature line (listing line 98,
// presumably "ModelCoreSpatial::initMarketModule(){") was dropped by the
// extraction; restore it from the original ModelCoreSpatial.cpp. The body
// (listing lines 99-231) is otherwise continuous.
// Purpose: base-year ("secondYear") initialisation of the market module. For
// every level-2 region it derives local prices of transformed products from
// primary-product prices via the technical-coefficient matrix "a", builds the
// demand/supply aggregates (dl, st, dt), computes the CES weights (q1 for
// demand, t1 for supply) and the composite quantities/prices (dc/sc, pc, pw)
// plus the capacity k; finally it zeroes inter-regional trade flows and sets
// the within-region transport cost to one and distance to zero. The commented
// RPAR(...) lines are the original GAMS equations being reproduced.
 99  msgOut(MSG_INFO, "Starting market module (init stage)..");
100 
101  for(uint i=0;i<regIds2.size();i++){
102  int r2 = regIds2[i];
103  //RPAR('pl',i,p_tr,t-1) = sum(p_pr, a(p_pr,p_tr)*RPAR('pl',i,p_pr,t-1))+m(i,p_tr);
104  for(uint sp=0;sp<secProducts.size();sp++){
105  double value = 0;
106  for (uint pp=0;pp<priProducts.size();pp++){
107  value += gpd("pl",r2,priProducts[pp],secondYear)*
108  gpd("a",r2,priProducts[pp],secondYear,secProducts[sp]);
109  }
110  value += (gpd("m",r2,secProducts[sp],secondYear)* gpd("pol_trSub",r2,secProducts[sp],secondYear));
111  spd(value,"pl",r2,secProducts[sp],secondYear,true);
112  }
113  // RPAR('dl',i,p_pr,t-1) = sum(p_tr, a(p_pr,p_tr)*RPAR('sl',i,p_tr,t-1));
114  for (uint pp=0;pp<priProducts.size();pp++){
115  double value=0;
116  for(uint sp=0;sp<secProducts.size();sp++){
117  value += gpd("sl",r2,secProducts[sp],secondYear)*
118  gpd("a",r2,priProducts[pp],secondYear, secProducts[sp]);
119  }
120  spd(value,"dl",r2,priProducts[pp],secondYear,true);
121  double stvalue = gpd("sl",r2,priProducts[pp],secondYear)
122  + gpd("sa",r2,priProducts[pp],secondYear);
123  spd(stvalue,"st",r2,priProducts[pp],secondYear,true);
124  double supply_fromForestShare = gpd("supply_fromForestShare",r2,priProducts[pp],secondYear);
125  spd(stvalue*supply_fromForestShare,"st_fromHarv",r2,priProducts[pp],secondYear,true); // needed for the biological module in year starty+1
126  }
127  // RPAR('st',i,prd,t-1) = RPAR('sl',i,prd,t-1)+RPAR('sa',i,prd,t-1);
128  // RPAR('dt',i,prd,t-1) = RPAR('dl',i,prd,t-1)+RPAR('da',i,prd,t-1);
129  for (uint ap=0;ap<allProducts.size();ap++){
130  //double debug = gpd("dl",r2,allProducts[ap],secondYear);
131  double dtvalue = gpd("dl",r2,allProducts[ap],secondYear)
132  + gpd("da",r2,allProducts[ap],secondYear);
133 
134  spd(dtvalue,"dt",r2,allProducts[ap],secondYear,true);
135  }
136 
137  // q1(i,p_tr) = 1/(1+((RPAR('dl',i,p_tr,t-1)/RPAR('da',i,p_tr,t-1))**(1/psi(i,p_tr)))*(RPAR('pl',i,p_tr,t-1)/PT(p_tr,t-1)));
138  // p1(i,p_tr) = 1-q1(i,p_tr);
139  // RPAR('dc',i,p_tr,t-1) = (q1(i,p_tr)*RPAR('da',i,p_tr,t-1)**((psi(i,p_tr)-1)/psi(i,p_tr))+ p1(i,p_tr)*RPAR('dl',i,p_tr,t-1)**((psi(i,p_tr)-1)/psi(i,p_tr)))**(psi(i,p_tr)/(psi(i,p_tr)-1));
140  // RPAR('pc',i,p_tr,t-1) = (RPAR('da',i,p_tr,t-1)/RPAR('dc',i,p_tr,t-1))*PT(p_tr,t-1)+(RPAR('dl',i,p_tr,t-1)/RPAR('dc',i,p_tr,t-1))*RPAR('pl',i,p_tr,t-1);
141  // RPAR('pc',i,p_pr,t-1) = (RPAR('sa',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*PT(p_pr,t-1)+(RPAR('sl',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*RPAR('pl',i,p_pr,t-1);
142  // RPAR('pw',i,p_tr,t-1) = (RPAR('dl',i,p_tr,t-1)*RPAR('pl',i,p_tr,t-1)+RPAR('da',i,p_tr,t-1)*PT(p_tr,t-1))/RPAR('dt',i,p_tr,t-1) ; //changed 20120419
143  // K(i,p_tr,t-1) = k1(i,p_tr)*RPAR('sl',i,p_tr,t-1);
// Demand side: CES aggregation of local (dl) and abroad (da) demand for each
// transformed product, using the world price pWo as the abroad price.
144  for(uint sp=0;sp<secProducts.size();sp++){
145  double psi = gpd("psi",r2,secProducts[sp],secondYear);
146  double dl = gpd("dl",r2,secProducts[sp],secondYear);
147  double da = gpd("da",r2,secProducts[sp],secondYear);
148  double pl = gpd("pl",r2,secProducts[sp],secondYear);
149  double sa = gpd("sa",r2,secProducts[sp],secondYear);
150  double sl = gpd("sl",r2,secProducts[sp],secondYear);
151  double k1 = gpd("k1",r2,secProducts[sp],secondYear);
152  double pWo = gpd("pl",WL2,secProducts[sp],secondYear); // World price (local price for region 99999)
153 
154  double stvalue = sl+sa;
155  double q1 = 1/ ( 1+pow(dl/da,1/psi)*(pl/pWo) );
156  double p1 = 1-q1;
157  double dc = pow(
158  q1*pow(da,(psi-1)/psi) + p1*pow(dl,(psi-1)/psi)
159  ,
160  psi/(psi-1)
161  );
162  double pc = (da/dc)*pWo
163  +(dl/dc)*pl;
164  double pw = (dl*pl+da*pWo)/(dl+da);
165  double k = k1*sl;
166 
167  spd(stvalue,"st",r2,secProducts[sp],secondYear,true);
168  spd(q1,"q1",r2,secProducts[sp],firstYear,true);
169  //spd(p1,"p1",r2,secProducts[sp],firstYear,true);
170  spd(dc,"dc",r2,secProducts[sp],secondYear,true);
171  spd(pc,"pc",r2,secProducts[sp],secondYear,true);
172  spd(pw,"pw",r2,secProducts[sp],secondYear,true);
173  spd(k,"k",r2,secProducts[sp],secondYear,true);
174  }
175 
176  // t1(i,p_pr) = 1/(1+((RPAR('sl',i,p_pr,t-1)/RPAR('sa',i,p_pr,t-1))**(1/eta(i,p_pr)))*(RPAR('pl',i,p_pr,t-1)/PT(p_pr,t-1)));
177  // r1(i,p_pr) = 1-t1(i,p_pr);
178  // RPAR('sc',i,p_pr,t-1) = (t1(i,p_pr)*RPAR('sa',i,p_pr,t-1)**((eta(i,p_pr)-1)/eta(i,p_pr))+ r1(i,p_pr)*RPAR('sl',i,p_pr,t-1)**((eta(i,p_pr)-1)/eta(i,p_pr)))**(eta(i,p_pr)/(eta(i,p_pr)-1))
179  // RPAR('pc',i,p_pr,t-1) = (RPAR('sa',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*PT(p_pr,t-1)+(RPAR('sl',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*RPAR('pl',i,p_pr,t-1);
180  // RPAR('pw',i,p_pr,t-1) = (RPAR('sl',i,p_pr,t-1)*RPAR('pl',i,p_pr,t-1)+RPAR('sa',i,p_pr,t-1)*PT(p_pr,t-1))/RPAR('st',i,p_pr,t-1) ; //changed 20120419
// Supply side: symmetric CES aggregation of local (sl) and abroad (sa) supply
// for each primary product.
181  for(uint pp=0;pp<priProducts.size();pp++){
182 
183  double sl = gpd("sl",r2,priProducts[pp],secondYear);
184  double sa = gpd("sa",r2,priProducts[pp],secondYear);
185  double eta = gpd("eta",r2,priProducts[pp],secondYear);
186  double pl = gpd("pl",r2,priProducts[pp],secondYear);
187  double pWo = gpd("pl",WL2,priProducts[pp],secondYear); // World price (local price for region 99999)
188 
189 
190  double t1 = 1/ ( 1+(pow(sl/sa,1/eta))*(pl/pWo) );
191  double r1 = 1-t1;
192  double sc = pow(
193  t1*pow(sa,(eta-1)/eta) + r1*pow(sl,(eta-1)/eta)
194  ,
195  eta/(eta-1)
196  );
197  double pc = (sa/sc)*pWo+(sl/sc)*pl;
198  double pw = (sl*pl+sa*pWo)/(sl+sa);
199 
200  spd(t1,"t1",r2,priProducts[pp],firstYear,true);
201  //spd(r1,"r1",r2,priProducts[pp],firstYear,true);
202  spd(sc,"sc",r2,priProducts[pp],secondYear,true);
203  spd(pc,"pc",r2,priProducts[pp],secondYear,true);
204  spd(pw,"pw",r2,priProducts[pp],secondYear,true);
205  }
206 
207  // up to here tested with gams output on 20120628, that's fine !!
208  } // end for each region in level 2
209 
210 
211  // initializing the exports to zero quantities
212  // initializing of the transport cost for the same region to one and distance to zero
213  for(uint r1=0;r1<l2r.size();r1++){
214  for(uint r2=0;r2<l2r[r1].size();r2++){
215  for(uint p=0;p<allProducts.size();p++){
216  for(uint r2To=0;r2To<l2r[r1].size();r2To++){
217  spd(0,"rt",l2r[r1][r2],allProducts[p],secondYear,true,i2s(l2r[r1][r2To])); // regional trade, it was exp in gams
218  if(l2r[r1][r2] == l2r[r1][r2To]){
219  spd(1,"ct",l2r[r1][r2],allProducts[p],firstYear,true,i2s(l2r[r1][r2To])); // as long as this value is higher than zero, rt within the same region is not chosen by the solver, so the value doesn't really matter. If it is zero, the solver still works and results are the same, but reported rt within the region are crazy high (100000)
220  }
221  }
222  } // end each product
223 
224  for(uint r2To=0;r2To<l2r[r1].size();r2To++){
225  if(l2r[r1][r2] == l2r[r1][r2To]){
226  spd(0,"dist",l2r[r1][r2],"",firstYear,true,i2s(l2r[r1][r2To])); // setting distance zero in code, so no need to put it in the data
227  }
228  }
229  } // end of r2 regions
230  } // end of r1 region
231 }
232 
233 void
// NOTE(review): doxygen listing dump — the signature line (listing line 234,
// presumably "ModelCoreSpatial::runMarketModule(){") was dropped by the
// extraction; restore it from the original ModelCoreSpatial.cpp. The body
// (listing lines 235-543) is otherwise continuous.
// Purpose: the yearly market step. (1) Pre-optimisation: for every level-2
// region, update the demand-side (k, aa, gg, q1_polCorr) and supply-side
// (bb, ff, sigma, t1_polCorr) coefficients from last year's equilibrium,
// inventories and policy multipliers (including the carbon-price correction);
// (2) solve the spatial equilibrium as an Ipopt NLP (maximisation via
// obj_scaling_factor = -1); (3) post-optimisation: rebuild total supply/demand
// and weighted prices, cap harvest at the available inventory of every
// product combination (tracking cumulative overharvesting), and split
// st_fromFor into harvest vs death-timber via allocateHarvesting().
235  msgOut(MSG_INFO, "Starting market module");
236  static double cumOverHarvesting = 0.0;
237  int thisYear = MTHREAD->SCD->getYear();
238  int previousYear = MTHREAD->SCD->getYear()-1;
239  bool useDeathTimber = MTHREAD->MD->getBoolSetting("useDeathTimber",DATA_NOW);
240 
241  // Added for carbon project -------------------------------
242  double intRate = MTHREAD->MD->getDoubleSetting("ir");
243 
244  // ----------------------------------------------------------------
245 
246  // *** PRE-OPTIMISATION YEARLY OPERATIONS..
247  for(uint i=0;i<regIds2.size();i++){
248  int r2 = regIds2[i];
249  for(uint sp=0;sp<secProducts.size();sp++){
250  double g1 = gpd("g1",r2,secProducts[sp],previousYear);
251  double sigma = gpd("sigma",r2,secProducts[sp]);
252  double pc_1 = gpd("pc",r2,secProducts[sp],previousYear);
253  double dc_1 = gpd("dc",r2,secProducts[sp],previousYear);
254  double k_1 = gpd("k",r2,secProducts[sp],previousYear);
255  double pol_mktDirInt_d = gpd("pol_mktDirInt_d",r2,secProducts[sp]);
256  double pol_mktDirInt_d_1 = gpd("pol_mktDirInt_d",r2,secProducts[sp],previousYear);
257  double q1 = gpd("q1",r2,secProducts[sp]);
258  double pol_mktStr_d = gpd("pol_mktStr_d",r2,secProducts[sp]);
259 
260  double q1_polCorr = min(1.0, (q1-0.5)*pol_mktStr_d+0.5); // The pol_mktStr_d coefficient makes q1 (the "b" weight in the CES function) converge toward 0.5 as pol_mktStr_d goes to 0. As pol_mktStr_d can be above 1, an upper cap of unity is imposed to q1
261 
262 
263 
264  double k = (1+g1)*k_1;
265  double aa = (sigma/(sigma+1))*pc_1*pow(dc_1,-1/sigma) * pol_mktDirInt_d_1/pol_mktDirInt_d ;
266  double gg = dc_1*pow(pc_1*pol_mktDirInt_d_1,-sigma); //alpha
267 
268  spd(k, "k" ,r2,secProducts[sp]);
269  spd(aa,"aa",r2,secProducts[sp],DATA_NOW,true);
270  spd(gg,"gg",r2,secProducts[sp],DATA_NOW,true);
271  spd(q1_polCorr,"q1_polCorr",r2,secProducts[sp],DATA_NOW,true);
272 
273  }
274 
275  // BB(i,p_pr) = (sigma(p_pr)/(sigma(p_pr)+1))*RPAR('pc',i,p_pr,t-1)*(RPAR('sc',i,p_pr,t-1)**(-1/sigma(p_pr)))*(In(i,p_pr,t-1)/In(i,p_pr,t))**(gamma(p_pr)/sigma(p_pr));
276  // FF(i,p_pr) = RPAR('sc',i,p_pr,t-1)*((RPAR('pc',i,p_pr,t-1))**(-sigma(p_pr)))*(In(i,p_pr,t)/In(i,p_pr,t-1))**(gamma(p_pr)); //chi
277  for(uint pp=0;pp<priProducts.size();pp++){
278  double gamma_incr = gpd("gamma_incr",r2,priProducts[pp]); // elast supply to increasing stocks
279  double gamma_decr = gpd("gamma_decr",r2,priProducts[pp]); // elast supply to decreasing stocks
280  double gamma_d_incr = gpd("gamma_d_incr",r2,priProducts[pp]); // elast supply to increasing stocks death timber
281  double gamma_d_decr = gpd("gamma_d_decr",r2,priProducts[pp]); // elast supply to decreasing stocks death timber
282 
283  double sigma = gpd("sigma",r2,priProducts[pp]); // elast supply to price
284  double pc_1 = gpd("pc",r2,priProducts[pp],previousYear);
285  double sc_1 = gpd("sc",r2,priProducts[pp],previousYear);
286  double in = gpd("in",r2,priProducts[pp]);
287  double in_0 = gpd("in",r2,priProducts[pp],MTHREAD->MD->getIntSetting("initialYear"));
288  double in_1 = gpd("in",r2,priProducts[pp],previousYear);
289  double in_d =gpd("in_deathTimber",r2,priProducts[pp]);
290  double in_d_1 ;
// In the first optimisation year there is no previous-year death-timber
// inventory, so the current year's value is used instead.
291  if (thisYear == MTHREAD->MD->getIntSetting("initialOptYear")){
292  in_d_1 = gpd("in_deathTimber",r2,priProducts[pp],thisYear);
293  } else {
294  in_d_1 = gpd("in_deathTimber",r2,priProducts[pp],previousYear);
295  }
296  double min_in_d = max(0.00001,in_0 * MTHREAD->MD->getDoubleSetting("minShareDeathIn"));
297 
298 
299  double pol_mktDirInt_s = gpd("pol_mktDirInt_s",r2,priProducts[pp]);
300  double pol_mktDirInt_s_1 = gpd("pol_mktDirInt_s",r2,priProducts[pp],previousYear);
301  double t1 = gpd("t1",r2,priProducts[pp]);
302  double pol_mktStr_s = gpd("pol_mktStr_s",r2,priProducts[pp]);
303 
304 
305  // Added for carbon project ------------------------------------------
306  double carbonPrice_1 = gpd("carbonPrice",r2,"",previousYear); //using dummy region until Anto solves issue // Antonello: solved but also generalised
307  double omega = gpd("co2content_products", r2, priProducts[pp])/1000; //kg to tons
308  double Pct_1 = carbonPrice_1*intRate;
309 
310  // -------------------------------------------------------------------
311 
312 
313  double t1_polCorr = min(1.0, (t1-0.5)*pol_mktStr_s+0.5); // The pol_mktStr_s coefficient makes t1 (the "b" weight in the CES function) converge toward 0.5 as pol_mktStr_s goes to 0. As pol_mktStr_s can be above 1, an upper cap of unity is imposed to t1
314 
315 
316  double gamma = in>in_1 ? gamma_incr: gamma_decr; // If inventories resources are decreasing we use a given elasticity, if they are increasing we use an other one
317  double gamma_d = in_d>in_d_1 ? gamma_d_incr: gamma_d_decr; // If inventories resources are decreasing we use a given elasticity, if they are increasing we use an other one
318 
319 
320  double in_ratio = in/in_1;
321  double in_d_ratio = useDeathTimber ? max(min_in_d,in_d) / max(min_in_d, in_d_1 ) : 1.0 ; // to avoid depending on the ratio of too small variations in mortality
322 
323  //msgOut(MSG_INFO,"in_d: "+d2s(in_d));
324  //msgOut(MSG_INFO,"in_d_1: "+d2s(in_d_1));
325  //msgOut(MSG_INFO,"in: "+d2s(in));
326  //msgOut(MSG_INFO,"in_1: "+d2s(in_1));
327  //msgOut(MSG_INFO,"in_d_ratio: "+d2s(in_d_ratio));
328 
329 #ifdef QT_DEBUG
330 if (in_d_ratio < 0.01){
331  msgOut(MSG_CRITICAL_ERROR,"Very low in_d_ratio ");
332 }
333 if (in_d_ratio > 100){
334  msgOut(MSG_CRITICAL_ERROR,"Very high in_d_ratio ");
335 }
336 #endif
337 
338  double bb = (sigma/(sigma+1.0))*pc_1*pow(sc_1,-1.0/sigma)*pow(1/in_ratio,gamma/sigma)*pow(1.0,1.0/sigma) * pol_mktDirInt_s_1/pol_mktDirInt_s ;
339 
340  /* Old FF (without carbon)
341  double ff = sc_1*pow(pc_1*pol_mktDirInt_s_1,-sigma)*pow(in_ratio,gamma); //chi
342  */
343 
344  // Added for carbon project: the carbon payment omega*Pct_1 is netted out of
// last year's composite price in the chi (supply-shift) coefficient.
345  double ff = sc_1*pow((pc_1 - omega * Pct_1)*pol_mktDirInt_s_1,-sigma)*pow(in_ratio,gamma)*pow(in_d_ratio,gamma_d);
346  // -----------------------------------------------
347 
348  spd(bb,"bb",r2,priProducts[pp],DATA_NOW,true);
349  spd(ff,"ff",r2,priProducts[pp],DATA_NOW,true);
350  spd(sigma,"sigma",r2,priProducts[pp],DATA_NOW,true);
351  spd(t1_polCorr,"t1_polCorr",r2,priProducts[pp],DATA_NOW,true);
352 
353  }
354 
355  } // end for each region in level 2 (and updating variables)
356 
357  //cout << "### "+i2s(thisYear-2)+"abb: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
358 
359  // *** OPTIMISATION....
360 
361  // Create an instance of the IpoptApplication
362  //Opt *OPTa = new Opt(MTHREAD);
363  //SmartPtr<TNLP> OPTa = new Opt(MTHREAD);
364  SmartPtr<IpoptApplication> application = new IpoptApplication();
365  string linearSolver = MTHREAD->MD->getStringSetting("linearSolver",DATA_NOW);
366  application->Options()->SetStringValue("linear_solver", linearSolver); // default in ipopt is ma27
367  //application->Options()->SetStringValue("hessian_approximation", "limited-memory"); // quasi-newton approximation of the hessian
368  //application->Options()->SetIntegerValue("mumps_mem_percent", 100);
369  application->Options()->SetNumericValue("obj_scaling_factor", -1); // maximisation
370  application->Options()->SetNumericValue("max_cpu_time", 1800); // max 1/2 hour to find the optimus for one single year
371  application->Options()->SetStringValue("check_derivatives_for_naninf", "yes");
372  //application->Options()->SetStringValue("halt_on_ampl_error", "yes"); // not a valid option
373  //application->Options()->SetStringValue("print_options_documentation", "yes"); // print the ipopt options (hundreds of pages!)
374  // Relaxing tolerance options... worse effect :-(
375  //application->Options()->SetNumericValue("tol", 1e-07);
376  //application->Options()->SetNumericValue("constr_viol_tol",0.001);
377  //application->Options()->SetNumericValue("compl_inf_tol",0.001);
378  //application->Options()->SetNumericValue("acceptable_tol", 1e-05);
379  //application->Options()->SetNumericValue("acceptable_dual_inf_tol", 1e+9);
380  //application->Options()->SetNumericValue("acceptable_constr_viol_tol", 0.1);
381  //application->Options()->SetNumericValue("acceptable_compl_inf_tol", 0.1);
382 
383 
384  // Initialize the IpoptApplication and process the options
385  ApplicationReturnStatus status;
386  status = application->Initialize();
387  if (status != Solve_Succeeded) {
388  printf("\n\n*** Error during initialization!\n");
389  msgOut(MSG_INFO,"Error during initialization! Do you have the solver compiled for the specified linear solver?");
390  return;
391  }
392 
393  msgOut(MSG_INFO,"Running optimisation problem for this year (it may take a few minutes for large models)..");
394  status = application->OptimizeTNLP(MTHREAD->OPT);
395 
396  // cout << "### "+i2s(thisYear-2)+"abc: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
397  // *** POST OPTIMISATION....
398 
399  // post-equilibrium variables->parameters assignments..
400  // RPAR(type,i,prd,t) = RVAR.l(type,i,prd);
401  // EX(i,j,prd,t) = EXP.l(i,j,prd);
402  // ObjT(t) = Obj.l ;
403  // ==> in Opt::finalize_solution()
404 
405  // Retrieve some statistics about the solve
406  if (status == Solve_Succeeded) {
407  Index iter_count = application->Statistics()->IterationCount();
408  Number final_obj = application->Statistics()->FinalObjective();
409  printf("\n*** The problem solved year %d in %d iterations!\n", thisYear, iter_count);
410  printf("\n*** The final value of the objective function is %e.\n", final_obj);
411  msgOut(MSG_INFO, "The problem solved successfully year "+i2s(thisYear)+" in "+i2s(iter_count)+" iterations.");
412  int icount = iter_count;
413  double obj = final_obj;
414  MTHREAD->DO->printOptLog(true, icount, obj);
415  } else {
416  //Number final_obj = application->Statistics()->FinalObjective();
417  int thisYear = MTHREAD->SCD->getYear();
418  cout << "***ERROR: MODEL DIDN'T SOLVE FOR THIS YEAR ("<<thisYear<<")"<< endl;
419  msgOut(MSG_CRITICAL_ERROR, "Model DIDN'T SOLVE for this year ("+i2s(thisYear)+")");
420  // IMPORTANT! Don't place the next two lines above the msgOut() function or it will crash in windows if the user press the stop button
421  //Index iter_count = application->Statistics()->IterationCount(); // syserror if model doesn't solve
422  //Number final_obj = application->Statistics()->FinalObjective();
423  int icount = 0;
424  double obj = 0;
425  MTHREAD->DO->printOptLog(false, icount, obj);
426  }
427 
428  for(uint r2= 0; r2<regIds2.size();r2++){ // you can use r2<=regIds2.size() to try an out-of range memory error that is not detected other than by valgrind (with a message "Invalid read of size 4 in ModelCore::runSimulationYear() in src/ModelCore.cpp:351")
429  int regId = regIds2[r2];
430  ModelRegion* REG = MTHREAD->MD->getRegion(regId);
431 
432  // *** st adjustments:***
433  // st_or[pp] -> original st from the mkt module
434  // st_fromFor[pp] -> cover the volumes from both alive trees and death ones. This should be bounded by inResByAnyCombination, that already includes death resources
435  // st_fromHarv[pp] -> from harvesting. This should be equal to hV and IGN data
436  // st
437  // st_or = st
438  // st_fromFor = st_or * a
439  // st_fromFor > in:
440  // (overharvesting)
441  // overHarv = st_fromFor - in
442  // st_fromFor = in
443  // st = st_fromFor/a
444  // st_fromHarv = distribute(st_fromFor)
445 
446  // // total supply and total demand..
447  // RPAR('st',i,prd,t) = RPAR('sl',i,prd,t)+RPAR('sa',i,prd,t);
448  // RPAR('dt',i,prd,t) = RPAR('dl',i,prd,t)+RPAR('da',i,prd,t);
449  // // weighted prices.. //changed 20120419
450  // RPAR('pw',i,p_tr,t) = (RPAR('dl',i,p_tr,t)*RPAR('pl',i,p_tr,t)+RPAR('da',i,p_tr,t)*PT(p_tr,t))/RPAR('dt',i,p_tr,t) ; //changed 20120419
451  // RPAR('pw',i,p_pr,t) = (RPAR('sl',i,p_pr,t)*RPAR('pl',i,p_pr,t)+RPAR('sa',i,p_pr,t)*PT(p_pr,t))/RPAR('st',i,p_pr,t) ; //changed 20120419
452  for (uint p=0;p<allProducts.size();p++){
453  double st = gpd("sl",regId,allProducts[p])+gpd("sa",regId,allProducts[p]);
454  double dt = gpd("dl",regId,allProducts[p])+gpd("da",regId,allProducts[p]);
455  spd(st,"st",regId,allProducts[p]);
456  spd(st,"st_or",regId,allProducts[p],DATA_NOW,true); // original total supply, not corrected by resetting it to min(st, inv).
457  spd(dt,"dt",regId,allProducts[p]);
458  }
// Demand-weighted price for transformed products (guarding against dt == 0).
459  for (uint p=0;p<secProducts.size();p++){
460  double dl = gpd("dl",regId,secProducts[p]);
461  double pl = gpd("pl",regId,secProducts[p]);
462  double da = gpd("da",regId,secProducts[p]); // bug corrected 20120913
463  double pworld = gpd("pl", WL2,secProducts[p]);
464  double dt = gpd("dt",regId,secProducts[p]);
465  double pw = dt?(dl*pl+da*pworld)/dt:0.0;
466  spd(pw,"pw",regId,secProducts[p]);
467  }
// Supply-weighted price for primary products, plus the share actually coming
// from the forest (st_fromFor), used below for the inventory cap.
468  for (uint p=0;p<priProducts.size();p++){
469  double sl = gpd("sl",regId,priProducts[p]);
470  double pl = gpd("pl",regId,priProducts[p]);
471  double sa = gpd("sa",regId,priProducts[p]); // bug corrected 20120913
472  double pworld = gpd("pl", WL2,priProducts[p]);
473  double st = gpd("st",regId,priProducts[p]);
474  double st_fromFor = st * gpd("supply_fromForestShare",regId,priProducts[p]);
475  double pw = st?(sl*pl+sa*pworld)/st:0.0;
476  spd(pw,"pw",regId,priProducts[p]);
477  spd(st_fromFor,"st_fromFor",regId,priProducts[p],DATA_NOW,true);
478  }
479 
480  // Correcting st if this is over the in
481 
482  // Create a vector with all possible combinations of primary products
483  vector<vector<int>> priPrCombs = MTHREAD->MD->createCombinationsVector(priProducts.size());
484  int nPriPrCombs = priPrCombs.size();
485 
486  for (uint i=0;i<priPrCombs.size();i++){
487  double stMkMod = 0.0;
488  double sumIn = REG->inResByAnyCombination[i];
489  // double sumIn2 = 0.0;
490  for (uint p=0;p<priPrCombs[i].size();p++){
491  stMkMod += gpd("st_fromFor",regId,priProducts[priPrCombs[i][p]]);
492  //sumIn2 += gpd("in",regId,priProducts[priPrCombs[i][p]]);
493  }
494 
495  //if(sumIn<=0.00001){
496  // for (uint p=0;p<priPrCombs[i].size();p++){
497  // spd(0.0,"st",regId,priProducts[priPrCombs[i][p]]);
498  // }
499  // } else {
// If the optimiser harvested more than the available inventory for this
// product combination, scale every involved product's st_fromFor down
// proportionally and accumulate the excess in cumOverHarvesting.
500  if(stMkMod>sumIn){ // if we harvested more than available
501  string pProductsInvolved = "";
502  for (uint p=0;p<priPrCombs[i].size();p++){
503  pProductsInvolved += (priProducts[priPrCombs[i][p]]+"; ");
504  }
505  double inV_over_hV_ratio = stMkMod ? sumIn/stMkMod : 0.0;
506  cumOverHarvesting += (stMkMod-sumIn);
507  msgOut(MSG_DEBUG, "Overharvesting has happened. Year: "+i2s(thisYear)+ "Region: "+i2s(regId)+"Involved products: "+pProductsInvolved+". sumIn: "+d2s(sumIn)+" stMkMod:" +d2s(stMkMod) + " cumOverHarvesting: "+d2s(cumOverHarvesting));
508  for (uint p=0;p<priPrCombs[i].size();p++){
509  double st_fromFor_orig = gpd("st_fromFor",regId,priProducts[priPrCombs[i][p]]);
510  spd(st_fromFor_orig*inV_over_hV_ratio,"st_fromFor",regId,priProducts[priPrCombs[i][p]]);
511  }
512  }
513 
514  //}
515 
516 
517  }
518 
519  //cout << "### "+i2s(thisYear-2)+"abe: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
520  // here we create st_fromHarv as st_fromFor - st_from_deathbiomass
521  vector <double> total_st(priProducts.size(),0.);
522  vector <double> in_deathTimber(priProducts.size(),0.);
523  vector <double> in_aliveForest (priProducts.size(),0.);
524  for (uint i=0;i<priProducts.size();i++){
525  total_st[i] = gpd("st_fromFor",regId,priProducts[i]);
526  in_deathTimber[i] = gpd("in_deathTimber",regId,priProducts[i]);
527  in_aliveForest[i] = gpd("in",regId,priProducts[i]);
528  }
529  // cout << "### "+i2s(thisYear-2)+"abf: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
530  vector <double> st_fromHarv= allocateHarvesting(total_st, regId);
531  //cout << "### "+i2s(thisYear-2)+"abg: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
532  for (uint i=0;i<priProducts.size();i++){
533  spd(st_fromHarv[i],"st_fromHarv",regId,priProducts[i],DATA_NOW,true);
534  }
535 
536  } // end of each region
537 
538  // cout << "### "+i2s(thisYear-2)+"abh: " << d2s( MD->deathTimberInventory_get(iisskey (thisYear-2,11001,"Fut_Feu","30") ) ) << endl;
539 
540  if (cumOverHarvesting>0.0){
541  msgOut(MSG_DEBUG, "Overharvesting is present. Year: "+i2s(thisYear)+" cumOverHarvesting: "+d2s(cumOverHarvesting));
542  }
543 }
544 
545 /**
546  * @brief ModelCoreSpatial::runBiologicalModule
547  *
548  * Changes in Area:
549  * dc area_l area diff
550  * 0 ---------> +regArea -areaFirstProdClass (areaMovingUp_00)
551  * 15 ---------> +areaFirstPrClass -hArea_15 -areaMovingUp_15
552  * 25 ---------> +areaMovingUp15 - hArea_25 - areaMovingUp_25
553  * 35 ---------> +areaMovingUp25 - hArea_35 - areaMovingUp_35
554  * ...
555  * 95 ---------> +areaMovingUp85 - hArea_95 - areaMovingUp_95
556  * 105 ---------> +areaMovingUp95 - hArea_105
557  *
558  * note: regArea is computed in the management module, not here. Further, regArea is already net of forest area changes
559  */
560 void
562 
563  msgOut(MSG_INFO, "Starting resource module..");
564  int thisYear = MTHREAD->SCD->getYear();
565  bool useDeathTimber = MD->getBoolSetting("useDeathTimber",DATA_NOW);
566 
567  for(uint i=0;i<regIds2.size();i++){
568  int r2 = regIds2[i];
569  int regId = r2;
570  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
571  //Gis* GIS = MTHREAD->GIS;
572  regPx = REG->getMyPixels();
573  double shareMortalityUsableTimber = 0.0;
574 
575  for (uint p=0;p<regPx.size();p++){
576  Pixel* px = regPx[p];
577 
578  double pxId = px->getID();
579  //if (pxId == 3550.0){
580  // cout << "got the pixel" << endl;
581  //}
582  //px->expectedReturns.clear();
583  for(uint j=0;j<fTypes.size();j++){
584  string ft = fTypes[j];
585  //double pxArea_debug = px->getDoubleValue("forArea_"+ft, true);
586  vector <double> hV_byDiam;
587  vector <double> productivity_byDc; // mc/ha/y
588  vector < vector <double> > hV_byDiamAndPrd;
589  vector <double> hArea_byDc;
590  vector <double> newVol_byDiam;
591  vector <double> vMort_byDc;
592  vector <double> vMortAdd_byDc ; // fire mortality
593  vector <double> areasMovingUp(dClasses.size(), 0.0);
594  double areaFirstProdClass;
595 
596 
597  // A - COMPUTING THE REGENERATION..
598  // if we are in a year where the time of passage has not yet been reached
599  // for the specific i,e,l then we use the exogenous Vregen, otherwise we
600  // calculate it
601  //if ( not scen("fxVreg") ,
602  // loop( (i,essence,lambda),
603  // if( ord(t)>=(tp_u1(i,essence,lambda)+2),
604  // Vregen(i,lambda,essence,t)=regArea(i,essence,lambda,t-tp_u1(i,essence,lambda))*volHa_u1(i,essence,lambda)/1000000 ;
605  // );
606  // );
607  //);
608  int tp_u0 = px->tp.at(j).at(0); // time of passage to reach the first production diameter class // bug 20140318, added ceil. 20140318 removed it.. model did go crazy with it
609  if(thisYear == secondYear){
610  px->initialDc0Area.push_back(px->area_l.at(j).at(0));
611  }
612  if(regType != "fixed" && (thisYear-secondYear) >= tp_u0 ) { // T.O.D.O to be checked -> 20121109 OK
613  double pastRegArea = px->getPastRegArea(j,thisYear-tp_u0);
614  double availableArea = px->area_l.at(j).at(0);
615  //double entryVolHa = gfd("entryVolHa",regId,ft,"");
616  double vHa = px->vHa.at(j).at(1);
617  //attenction that at times could take the wrong pastRegArea if tp change too suddenly as in some "strange" scenarios
618  if (oldVol2AreaMethod){
619  areaFirstProdClass = pastRegArea;
620  } else {
621  areaFirstProdClass = min(availableArea, pastRegArea); // this is just a start and will need to include the last year area
622  }
623  px->vReg.push_back(areaFirstProdClass*vHa/1000000.0); // TO.DO: check the 1000000. Should be ok, as area in ha vol in Mm^3
624  //if (pxId == 3550.0 && j==3){
625  // cout << "got the pixel" << endl;
626  //}
627  #ifdef QT_DEBUG
628  if (areaFirstProdClass < 0.0){
629  //msgOut(MSG_CRITICAL_ERROR,"Negative regeneration volumes in endogenous regeneration");
630  }
631  if ( (availableArea-pastRegArea) < -0.00000001 ){
632  // in a very rare cases tp change first in a direction and then in the other, so that the wrong past regeneration area
633  // is picken up.
634  //msgOut(MSG_CRITICAL_ERROR,"Upgrading from dc0 more area than the available one in endogenous regeneration");
635  }
636  #endif
637  } else {
638  double regionArea = REG->getValue("forArea_"+ft,OP_SUM);
639  double pxArea = px->getDoubleValue("forArea_"+ft, true); // 20121109 bug solved (add get zero for not data)
640  double regRegVolumes = gfd("vReg",r2,ft,"");
641  double newVReg = regionArea ? regRegVolumes*pxArea/regionArea : 0.0;
642  px->vReg.push_back(newVReg); // 20121108 BUG !!! solved // as now we have the area we could also use here entryVolHa
643  // only a share of the exogenous area goes up, the regeneration one doesn't yet reach tp0:
644  // areaFirstProdClass = (1.0 / px->tp.at(j).at(0) ) * px->area_l.at(j).at(0);
645  areaFirstProdClass = (1.0 / ((double) tp_u0) ) * px->initialDc0Area.at(j);
646  // in the exogenous period we are exogenously upgrading u0->u1 some areas but, as we do not have the regeneration
647  // are corresponding to that we have also to manually add it to u0
648  //px->area_l.at(j).at(0) += areaFirstProdClass;
649  //areaFirstProdClass = entryVolHa ? newVReg*1000000 /entryVolHa:0.0;
650  //if (pxId == 3550.0 && j==3){
651  // cout << "got the pixel" << endl;
652  //}
653 
654  #ifdef QT_DEBUG
655  if (areaFirstProdClass<0.0){
656  // msgOut(MSG_CRITICAL_ERROR,"Negative regeneration volumes in exogenous regeneration");
657  }
658  if (areaFirstProdClass > px->area_l.at(j).at(0)){
659  //msgOut(MSG_CRITICAL_ERROR,"Moving up area higher than available area in exogenous regeneration !");
660  }
661  #endif
662  // vReg and entryVolHa are NOT the same thing. vReg is the yearly regeneration volumes
663  // for the whole region. We can use them when we don't know the harvested area
664  // entryVolHa can lead to vReg calculation only when we know the regeneration area. So in the
665  // first years we use vReg and subsequently the endogenous one.
666  }
667 
668  //double harvestedArea = 0;
669 
670  if(useDeathTimber){
671  shareMortalityUsableTimber = gfd("shareMortalityUsableTimber",r2,ft,"");
672  } else {
673  shareMortalityUsableTimber = 0.0;
674  }
675 
676  for (uint u=0; u<dClasses.size(); u++){
677  string dc = dClasses[u];
678  double hr = 0;
679  double age = getAvgAgeByDc(px, j, u);
680 
681  //double pastYearVol_reg = u ? gfd("vol",r2,ft,dc,thisYear-1): 0;
682  double pastYearVol = px->vol_l.at(j).at(u);
683  vector <double> hV_byPrd;
684  vector <double> hr_byPrd;
685 
686  // harvesting rate & volumes...
687  // hr is by region.. no reasons in one pixel the RATE of harvesting will be different than in an other pixel
688  //hr(u,i,essence,lambda,t) = sum(p_pr, prov(u,essence,lambda,p_pr)*RPAR('st',i,p_pr,t)/In(i,p_pr,t));
689  //hV(u,i,essence,lambda,t) = hr(u,i,essence,lambda,t) * V(u,i,lambda,essence,t-1);
690  //hV_byPrd(u,i,essence,lambda,p_pr,t) = prov(u,essence,lambda,p_pr)*(RPAR('st',i,p_pr,t)/In(i,p_pr,t))*V(u,i,lambda,essence,t-1);
691  for(uint pp=0;pp<priProducts.size();pp++){
692  double st = gpd("st_fromHarv",r2,priProducts[pp]);
693  double in = gpd("in",r2,priProducts[pp]);
694  double hr_pr = in ? app(priProducts[pp],ft,dc)*st/in : 0.0;
695  hr_byPrd.push_back( hr_pr);
696  hr += hr_pr;
697  }
698 
699  // adjusting for overharvesting..
700  // 20160204: inserted to account that we let supply to be marginally higher than in in the mamarket module, to let the solver solving
701  double origHr = hr;
702  hr = min(1.0,hr);
703  for(uint pp=0;pp<priProducts.size();pp++){
704  double hr_pr = origHr ? hr_byPrd[pp] * min(1.0,1.0/origHr) : 0.0;
705  hV_byPrd.push_back( hr_pr*pastYearVol*px->avalCoef.at(j));
706  }
707 
708  double hV = hr*pastYearVol*px->avalCoef.at(j);
709 
710 
711  hV_byDiam.push_back(hV);
712  hV_byDiamAndPrd.push_back(hV_byPrd);
713 
714  // post harvesting remained volumes computation..
715  // loop(u$(ord(u)=1),
716  // first diameter class, no harvesting and fixed regenaration..
717  // V(u,i,lambda,essence,t)=(1-1/(tp(u,i,lambda,essence))-mort(u,i,lambda,essence) )*V(u,i,lambda,essence,t-1)
718  // +Vregen(i,lambda,essence,t);
719  // );
720  // loop(u$(ord(u)>1),
721  // generic case..
722  // V(u,i,lambda,essence,t)=((1-1/(tp(u,i,lambda,essence))
723  // -mort(u,i,lambda,essence) - hr(u,i,essence,lambda,t))*V(u,i,lambda,essence,t-1)
724  // +(1/(tp(u-1,i,lambda,essence)))*beta(u,i,lambda,essence)*V(u-1,i,lambda,essence,t-1));
725  double vol;
726  double tp = px->tp.at(j).at(u); //gfd("tp",regId,ft,dc);
727  double mort = px->mort.at(j).at(u); //gfd("mortCoef",regId,ft,dc);
728  double addMort = px->addMort.at(j).at(u); // fire mortality
729  double vReg = px->vReg.at(j); //gfd("vReg",regId,ft,""); // Taking it from the memory database as we could be in a fixed vReg scenario and not having calculated it from above!
730  double beta = px->beta.at(j).at(u); //gfd("betaCoef",regId,ft,dc);
731  //double hv2fa = gfd("hv2fa",regId,ft,dc);
732  double vHa = px->vHa.at(j).at(u); //gfd("vHa",regId,ft,dc);
733  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
734 
735  double vMort = mort*pastYearVol;
736  double vMortAdd = addMort*pastYearVol;
737 
738  vMort_byDc.push_back(vMort);
739  vMortAdd_byDc.push_back(vMortAdd); // fire mortality
740 
741  if(useDeathTimber){
742  iisskey key(thisYear,r2,ft,dc);
743 
744  MD->deathTimberInventory_incrOrAdd(key,(vMort+vMortAdd)*shareMortalityUsableTimber);
745  }
746 
747  if(u==0){
748  vol = 0.0;
749  }else if(u==1){
750  vol = max(0.0,(1-1/tp-mort-addMort))*pastYearVol+vReg; //Antonello, "bug" fixed 20160203: In case of very strong mortality this quantity (that doesn't include harvesting) could be negative!
751  //double debug = vol;
752  #ifdef QT_DEBUG
753  if ((1-1/tp-mort-addMort)<0.0){
754  msgOut(MSG_DEBUG,"The sum of leaving trres and mortality would have lead to nevative volume if we didn't put a max. 1/tp: "+d2s(1/tp)+", mort: "+d2s(mort)+", total coeff: "+d2s((1-1/tp-mort))+" ");
755  }
756  #endif
757  } else {
758  // time of passage and volume of smaller diameter class
759  double inc = (u==dClasses.size()-1)?0:1./tp; // we exclude the possibility for trees in the last diameter class to move to an upper class
760  double tp_1 = px->tp.at(j).at(u-1); //gfd("tp",regId,ft,dClasses[u-1]);
761  double pastYearVol_1 = px->vol_l.at(j).at(u-1); //gfd("vol",regId,ft,dClasses[u-1],thisYear-1);
762  //vol = max(0.0,(1-inc-mort-hr)*pastYearVol+(1/tp_1)*beta*pastYearVol_1);
763  vol = max(0.0,(1-inc-mort-addMort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1); // I can't use any more hr as it is the harvesting rate over the available volumes, not the whole ones
764  #ifdef QT_DEBUG
765  if ((1-inc-mort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1 < 0){
766  double realVolumes = (1-inc-mort-addMort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1;
767  msgOut(MSG_DEBUG,"Negative real volumes ("+d2s(realVolumes)+"), possibly because of little bit larger bounds in the market module to avoid zeros. Volumes in the resource module set back to zero, so it should be ok.");
768  }
769  #endif
770  }
771  if(u != 0){ // this if is required to avoid a 0/0 and na error that then propage also in vSum()
772  double inc = (u==dClasses.size()-1)?0:1.0/tp; // we exclude the possibility for trees in the last diameter class to move to an upper class
773  double volumesMovingUp = inc*pastYearVol;
774  double pastArea = px->area_l.at(j).at(u);
775 
776  areasMovingUp.at(u) = inc*pastArea;
777 
778  if(oldVol2AreaMethod) {
779  hArea_byDc.push_back(finalHarvestFlag*1000000*hV/vHa); // volumes are in Mm^3, area in ha, vHa in m^3/ha
780  } else {
781  double finalHarvestedVolumes = finalHarvestFlag* hV;
782  double finalHarvestedRate = pastYearVol?finalHarvestedVolumes/pastYearVol:0.0; // Here we want the harvested rate over the whole volumes, not just the available ones, so we don't need to multiply to px->avalCoef.at(j)
783  #ifdef QT_DEBUG
784  if (finalHarvestedRate > 1.0){
785  msgOut(MSG_CRITICAL_ERROR,"Negative final harvested rate.");
786  }
787  #endif
788  hArea_byDc.push_back(finalHarvestedRate*pastArea); // volumes are in Mm^3, area in ha, vHa in m^3/ha
789  }
790  px->area.at(j).at(u) = max(0.0, px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u));
791  #ifdef QT_DEBUG
792  if ((px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u))< 0.0){
793  msgOut(MSG_DEBUG,"If not for a max, we would have had a negative area ("+d2s(px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u))+" ha).");
794  }
795  #endif
796  } else {
797  areasMovingUp.at(u) = areaFirstProdClass;
798  hArea_byDc.push_back(0.);
799  px->area.at(j).at(u) = px->area_l.at(j).at(u) - areasMovingUp.at(u) - hArea_byDc.at(u);
800  //if (pxId == 3550.0 && j==3){
801  // cout << "got the pixel" << endl;
802  //}
803  }
804  newVol_byDiam.push_back(vol);
805  #ifdef QT_DEBUG
806  if(px->area.at(j).at(u)< 0.0 || areasMovingUp.at(u) < 0.0 || hArea_byDc.at(u) < 0.0 ){
807  msgOut(MSG_CRITICAL_ERROR, "Negative values in runBiologicalModel");
808  }
809  #endif
810 
811  //double debug = hv2fa*hr*pastYearVol*100;
812  //cout << "regId|ft|dc| debug | freeArea: " << r2 << "|"<<ft<<"|"<<dc<<"| "<< debug << " | " << freeArea_byU << endl;
813 
814  //sfd(hr,"hr",regId,ft,dc);
815  //sfd(hV,"hV",regId,ft,dc);
816  //sfd(vol,"vol",regId,ft,dc);
817 
818  //sfd(freeArea_byU,"harvestedArea",regId,ft,dc,DATA_NOW,true);
819  double productivity = hArea_byDc.at(u) ? (1000000.0 * hV_byDiam.at(u))/(hArea_byDc.at(u)*age) : 0.0;
820  productivity_byDc.push_back(productivity);
821  } // end foreach diameter classes
822  px->hVol.push_back(hV_byDiam);
823  px->hVol_byPrd.push_back(hV_byDiamAndPrd);
824  px->hArea.push_back(hArea_byDc);
825  px->vol.push_back(newVol_byDiam);
826  px->vMort.push_back(vMort_byDc);
827  px->vMortAdd.push_back(vMortAdd_byDc); // fire mortality
828  px->hProductivity.push_back(productivity_byDc);
829 
830 
831  #ifdef QT_DEBUG
832  for (uint u=1; u<dClasses.size(); u++){
833  double volMort = vMort_byDc[u];
834  double harvVol = hV_byDiam[u];
835  double vol_new = newVol_byDiam[u];
836  double vol_lagged = px->vol_l.at(j).at(u);
837  double gain = vol_new - (vol_lagged-harvVol-volMort);
838  if (volMort > vol_lagged){
839  msgOut(MSG_CRITICAL_ERROR,"mort vol > lagged volumes ?");
840  }
841  }
842  #endif
843  } // end of each forest type
844  } // end of each pixel
845 
846  #ifdef QT_DEBUG
847  // checking that in a region the total hVol is equal to the st for each products. 20150122 Test passed with the new availCoef
848  double sumSt = 0.0;
849  double sumHv = 0.0;
850  for(uint pp=0;pp<priProducts.size();pp++){
851  sumSt += gpd("st_fromHarv",r2,priProducts[pp]);
852  }
853  for (uint p=0;p<regPx.size();p++){
854  for(uint j=0;j<fTypes.size();j++){
855  for (uint u=0; u<dClasses.size(); u++){
856  for(uint pp=0;pp<priProducts.size();pp++){
857  // by ft, dc, pp
858  sumHv += regPx[p]->hVol_byPrd[j][u][pp];
859  }
860  }
861  }
862  }
863  if(abs(sumSt-sumHv) > 0.000001){
864  msgOut(MSG_DEBUG, "St and harvested volumes diverge in region "+REG->getRegSName()+". St: "+d2s(sumSt)+" hV: "+d2s(sumHv));
865  }
866  #endif
867  } // end of each region
868 
869 }
870 
871 
872 
873 void
875  msgOut(MSG_INFO, "Starting management module..");
876  vector<string> allFTypes = MTHREAD->MD->getForTypeIds(true);
877  map<string,double> hAreaByFTypeGroup = vectorToMap(allFTypes,0.0);
878  int thisYear = MTHREAD->SCD->getYear();
879  double ir = MTHREAD->MD->getDoubleSetting("ir");
880 
881  // Post optimisation management module..
882  for(uint i=0;i<regIds2.size();i++){
883  int r2 = regIds2[i];
884  int regId = r2;
885  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
886  regPx = REG->getMyPixels();
887 
888  string flagHartman = MTHREAD->MD->getStringSetting("flagHartman",DATA_NOW,r2);
889  double pickling = MTHREAD->MD->getDoubleSetting("pickling"); // pickling factor used to factor in sequestration ex-situ and substitution effects
890 
891  //double exogenousReference = MTHREAD->MD->getBoolSetting("exogenousReference"); // new setting to distinguish between exogenous and endogenous carbon reference levels
892 
893 
894 
895 
896  // Caching futureCarbonPrices..
897  double carbonPrice_NOW = gpd("carbonPrice",r2,"",thisYear+1); // first carbon payments take place the year after replanting
898  vector<double> futureCarbonPrices;
899  if(flagHartman!="NONE"){
900  for (int y=0; y<1000; y++) {
901  futureCarbonPrices.push_back(gpd("carbonPrice",r2,"",thisYear + y));
902  }
903  }
904 
905  vector<double> polBal_fiSub (fTypes.size(),0.0); // Expenses for forest investment subsides by region and destination ft €
906 
907  // Dealing with area change..
908  double fArea_reg = REG->getArea();
909  double fArea_diff = 0.0;
910  double fArea_reldiff = 0.0;
911  if(forestAreaChangeMethod=="relative"){
912  fArea_reldiff = gfd("forestChangeAreaIncrementsRel",r2,"","",DATA_NOW);
913  fArea_diff = fArea_reg * fArea_reldiff;
914  } else if (forestAreaChangeMethod=="absolute"){
915  fArea_diff = gfd("forestChangeAreaIncrementsHa",r2,"","",DATA_NOW);
916  fArea_reldiff = fArea_diff / fArea_reg;
917  }
918 
919  double regHArea = 0.0; // for the warning
920 
921  // Management rate
922  // 20170123: if mr is defined at regional level in the forData table, good, otherwise we look at settings table.
923  // If it isn't there as well, we rise a critical error.
924  // 20170302: removed the (false positive) error message raised if we put mr in setting table
925  // 20180626: mr is now, like all the other settings, powered with a regional dimension, so this looking in the
926  // forest data is no longer needed, but we keep it as some old projects have mr in the forData sheet.
927  double mr;
928  int origErrorLevel = MD->getErrorLevel();
929  MTHREAD->MD->setTempBool(true); // this should not be needed as it is set positive in getForData() map soubroutine..
931  mr = MD->getForData("mr",regId,"","");
932  if(!MTHREAD->MD->getTempBool()) { // mr not found in the forest data
934  mr = MD->getDoubleSetting("mr",DATA_NOW,regId);
935  }
936  //try{
937  // mr = MD->getForData("mr",regId,"","");
938  //} catch (...){
939  // mr = MD->getDoubleSetting("mr");
940  //}
941  MD->setErrorLevel(origErrorLevel);
942 
943  // Caching carbon reference levels by ftypes
944  vector<vector<double>> futureCarbonRef;
945  vector<vector<double>> futureExtraBiomass_ratio;
946 
947  if(flagHartman!="NONE"){
948  for(uint j=0;j<fTypes.size();j++){
949  string ft = fTypes[j];
950  vector<double> futureCarbonRef_ft;
951  vector<double>futureExtraBiomass_ratio_ft;
952  for (int y=0; y<1000; y++) {
953  double carbonRef = gfd("carbonRef",r2,ft,"", thisYear + y);
954  futureCarbonRef_ft.push_back(carbonRef);
955  double extraBiomass_ratio = gfd("extraBiomass_ratio",regId,ft,"",DATA_NOW);
956  futureExtraBiomass_ratio_ft.push_back(extraBiomass_ratio);
957  }
958  futureCarbonRef.push_back(futureCarbonRef_ft);
959  futureExtraBiomass_ratio.push_back(futureExtraBiomass_ratio_ft);
960  }
961  }
962 
963 
964  for (uint p=0;p<regPx.size();p++){
965  Pixel* px = regPx[p];
966  px->expectedReturns.clear();
967  px->expectedReturnsNotCorrByRa.clear(); // BUG discovered 20160825
968  //px->expectedReturnsCarbon.clear(); TODO
969  px->optDc.clear();
970  px->optFtChosen.clear();
971  px->optDcChosen.clear();
972  px->expectedAnnualIncome_carbon.clear();
973  px->expectedAnnualIncome_timber.clear();
974  resetMapValues(hAreaByFTypeGroup,0.0);
975  double totalHarvestedAreaOrig = vSum(px->hArea); // still need to remove the forest decrease areas..
976  double totalHarvestedArea = 0.0; // account for (eventual) forest decreases area)
977  vector<double> thisYearRegAreas(fTypes.size(),0.0); // initialize a vector of fTypes.size() zeros.
978  vector<double> expectedReturns(fTypes.size(),0.0); // uncorrected expected returns (without considering transaction costs). These are in form of eai
979  vector<double> expectedReturnsCarbon(fTypes.size(),0.0); // expected return of the carbon only
980  vector<int> rotation_dc(fTypes.size(),0.0); //vector to store optimal dc for each ftype (to be reused when flagHartman == "BAU")
981  vector< vector<double> > deltaAreas(fTypes.size()+1, vector<double>(fTypes.size()+1,0)); // matrix of regeneration areas ft_from/ft_to
982 
983 
984 
985  double fArea_px = vSum(px->area);
986  double fArea_diff_px = fArea_px * fArea_diff/ fArea_reg;
987 
988  double fArea_incr = max(0.0,fArea_diff_px);
989  double fArea_incr_rel = totalHarvestedAreaOrig?fArea_incr/totalHarvestedAreaOrig:0.0;
990  double fArea_decr = - min(0.0,fArea_diff_px);
991  double fArea_decr_rel = totalHarvestedAreaOrig?min(1.0,fArea_decr/totalHarvestedAreaOrig):0.0; // we can "remove" at maximum what has been harvested
992  regHArea += totalHarvestedAreaOrig;
993  totalHarvestedArea = totalHarvestedAreaOrig *(1-fArea_decr_rel);
994 
995 
996  // A - Computing the harvestingArea by parent ft group (for the allocation according to the prob of presence):
997  for(uint j=0;j<fTypes.size();j++){
998  string ft = fTypes[j];
999  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1000  double hAreaThisFt=vSum(px->hArea.at(j))*(1-fArea_decr_rel);
1001  incrMapValue(hAreaByFTypeGroup,parentFt,hAreaThisFt); // increment the parent ft of the harvested area, neeed for assigning the frequences (prob. of presence)
1002  }
1003 
1004  // B - Computing the uncorrected expected returns (without considering transaction costs)
1005  // 20120910, Antonello: changed.. calculating the expected returns also for fixed and fromHrLevel regeneration (then not used but gives indication)
1006  // calculating the expected returns..
1007  // loop ( (u,i,essence,lambda,p_pr),
1008  // if (sum(u2, hV(u2,i,essence,lambda,t))= 0,
1009  // expRetPondCoef(u,i,essence,lambda,p_pr) = 0;
1010  // else
1011  // expRetPondCoef(u,i,essence,lambda,p_pr) = hV_byPrd(u,i,essence,lambda,p_pr,t)/ sum(u2, hV(u2,i,essence,lambda,t));
1012  // );
1013  // );
1014  // expReturns(i,essence,lambda) = sum( (u,p_pr),
1015  // RPAR("pl",i,p_pr,t)*hv2fa(i,essence,lambda,u)*(1/df_byFT(u,i,lambda,essence))* // df_byFT(u,i,lambda,essence)
1016  // expRetPondCoef(u,i,essence,lambda,p_pr)
1017  // );
1018 
1019  for(uint j=0;j<fTypes.size();j++){
1020  string ft = fTypes[j];
1021 
1022  // Added for carbon project
1023  double co2content_inventory = gfd("co2content_inventory",r2,ft,"",DATA_NOW)/1000; // kilos to tons
1024  // End of added
1025 
1026 
1027  double expReturns = std::numeric_limits<double>::lowest(); // these are reset at the beginning of each forest type, and need to be declared before the next loop
1028  double expReturns_carbon = std::numeric_limits<double>::lowest();
1029  double expReturns_timber = std::numeric_limits<double>::lowest();
1030  int optDc = 0; // "optimal diameter class", the one on which the expected returns are computed
1031  for (uint u=0; u<dClasses.size(); u++){
1032  string dc = dClasses[u];
1033  double vHa = px->vHa_exp.at(j).at(u);
1034  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
1035  double cumTp_u = px->cumTp_exp.at(j).at(u);
1036  for (uint pp=0;pp<priProducts.size();pp++){
1037  double pl = gpd("pl",regId,priProducts[pp]); // note that this is the OBSERVED price. If we call it at current year+cumTp_u we would have the expected price. But we would first have to compute it, as pw is weigthed price world-local and we don't have local price for the future. DONE 20141202 ;-)
1038  double worldCurPrice = gpd("pl",WL2,priProducts[pp]);
1039  double worldFutPrice = gpd("pl",WL2,priProducts[pp],thisYear+cumTp_u);
1040  double sl = gpd("sl",regId,priProducts[pp]);
1041  double sa = gpd("sa",regId,priProducts[pp]);
1042  double pw_exp = computeExpectedPrice(pl, worldCurPrice, worldFutPrice, sl, sa, px->expTypePrices); //20141030: added the expected price!
1043  double raw_amount = finalHarvestFlag*pw_exp*vHa*app(priProducts[pp],ft,dc); // B.U.G. 20121126, it was missing app(pp,ft,dc) !!
1044  double anualised_amount_timber = MD->calculateAnnualisedEquivalent(raw_amount,cumTp_u, ir); // expected revenues from timber
1045  double anualised_amount = anualised_amount_timber; // total expected revenues (for now there is no carbon payment yet)
1046 
1047  // If carbon payments are included and the reference is an exogenous fixed value (e.g. year of reference), we compute revenues from carbon after retrieving reference from input files
1048  // Added for carbon project-------------------------------------------------------------------------
1049  double raw_amount_carbon = 0; // declared and initialized before IF statement and FOR loop
1050  if (flagHartman =="EXO") {
1051  double carbonPrice_y = carbonPrice_NOW;
1052  for (int y=0; y<cumTp_u; y++) {
1053  //double carbonPrice_y = futureCarbonPrices[y];
1054  double carbonRef = futureCarbonRef[j][y];
1055  double expectedVHa_y = getVHaByYear(px, j, y, futureExtraBiomass_ratio[j][y], regId);
1056  double carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - carbonRef);
1057  raw_amount_carbon += carbonPayment_y*pow(1+ir,cumTp_u - y); // raw amount is calculated iteratively
1058  }
1059  //Now we add the final payment corresponding to carbon sequestered in long-lived products
1060  //double CarbonPrice_cumTp = futureCarbonPrices[floor(cumTp_u)]; INSTEAD WE USE CURRENT CARBON PRICE
1061  raw_amount_carbon += pickling * carbonPrice_y * finalHarvestFlag * vHa * app(priProducts[pp],ft,dc); // we add the final payment to raw amount for the final year, outside the FOR loop
1062  }
1063  // And we finally annualise everything
1064  double anualised_amount_carbon = MD->calculateAnnualisedEquivalent(raw_amount_carbon,cumTp_u, ir);
1065  anualised_amount += anualised_amount_carbon; // anualised_amount already had the timber revenues in it, now we add carbon
1066 
1067  // End of added-----------------------------------------------------------------------------------
1068 
1069 
1070 
1071  if (anualised_amount>expReturns) {
1072  expReturns=anualised_amount;
1073  optDc = u;// this is the index of the optimal dc, not the dc itself
1074  expReturns_carbon = anualised_amount_carbon;
1075  expReturns_timber = anualised_amount_timber;
1076  }
1077  } // end for each product
1078  } // end for each diameter class
1079 
1080  //results are pushed back to pixel only if they are final
1081  if (flagHartman=="NONE" || flagHartman=="EXO") {
1082  px->expectedAnnualIncome_timber.push_back(expReturns_timber);
1083  px->expectedAnnualIncome_carbon.push_back(expReturns_carbon);
1084  px->expectedReturnsNotCorrByRa.push_back(expReturns);
1085  px->optDc.push_back(optDc);
1086  }
1087 
1088  //calculation of expected returns with risk aversion
1089  if(MD->getBoolSetting("heterogeneousRiskAversion",DATA_NOW)){
1090  double ra = px->getDoubleValue("ra");
1091  double cumMort = 1-px->cumAlive_exp.at(j).at(optDc);
1092  //cout << px->getID() << "\t" << ft << "\t\t" << "optDc" << optDc << "\t" << cumMort << endl;
1093  double origExpReturns = expReturns;
1094  expReturns = origExpReturns * (1.0 - ra*cumMort);
1095  }
1096 
1097  // results are pushed back to pixel only if they are final
1098  if (flagHartman=="NONE" || flagHartman=="EXO") {
1099  px->expectedReturns.push_back(expReturns); // if there is no risk version, then expectedReturns and expectedReturnsNotCorrByRa will have the same value
1100  }
1101 
1102  expectedReturns.at(j) = expReturns;
1103  rotation_dc.at(j) = optDc; // here we store the index of optimal dc for the forest type considered
1104 
1105  } // end foreach forest type
1106 
1107 
1108  // Looping again by forest types, here by ft in input (those harvested)
1109  // Added for second carbon project-------------------------------------------------------------------------------------------------------------------------------------
1110  // If carbon payments are included and the reference is BAU (ie, endogenous, we need a new loop-------------------------------------------------------------------
1111  // In this case, the reference is what happens without policy, i.e. the result of the previous loop when (flagHartman =="REFERENCE") is FALSE
1112  // So we just redo another loop with carbon payments using the previous result as reference
1113 
1114  if (flagHartman =="ENDO") { // if carbon payments take place and reference is endogenous (is BAU)
1115  // STEP 1 - retrieve optimal regeneration choice without carbon payments
1116  int ft_opt_index = getMaxPos(expectedReturns); // Faustmannian optimal forest type index
1117  string ft_opt = fTypes[ft_opt_index]; // Faustmannian optimal forest type name
1118  int dc_opt_index = rotation_dc[ft_opt_index]; // Faustmannian optimal diameter class index
1119  string dc_opt = dClasses[dc_opt_index]; // Faustmannian optimal diameter class name
1120  double cumTP_opt = px->cumTp_exp.at(ft_opt_index).at(dc_opt_index); // Faustmannian optimal rotation length
1121  double co2content_inventory_opt = gfd("co2content_inventory",r2,ft_opt,"",DATA_NOW)/1000; // kilos to tons
1122 
1123  // STEP 2 - calculate expected carbon contents on the optimal choice without carbon payments (Faustmannian optimum), to use as reference
1124  vector <double> FutureCarbonContent_opt;
1125  for (int y=0; y<=cumTP_opt; y++) {
1126  double expectedVHa_opt_y = getVHaByYear(px, ft_opt_index, y, futureExtraBiomass_ratio[ft_opt_index][y], regId);
1127  FutureCarbonContent_opt.push_back(co2content_inventory_opt*expectedVHa_opt_y);
1128  }
1129  // the Hartmanian choice will usually be longer, so the Faustmannian rotation needs to be replicated several times (at least 2000 years, should be enough)
1130  while (FutureCarbonContent_opt.size()<=2000){
1131  FutureCarbonContent_opt.insert(std::end(FutureCarbonContent_opt), std::begin(FutureCarbonContent_opt), std::end(FutureCarbonContent_opt));
1132  }
1133  //int cumTP_opt_floor = FutureCarbonContent_opt.size(); // need to get an integer value
1134 
1135  // STEP 3 - new loop to find optimal regeneration choice WITH carbon payments, using the choice WITHOUT carbon payments as a reference
1136 
1137  for(uint j=0;j<fTypes.size();j++){
1138  string ft = fTypes[j];
1139  double co2content_inventory = gfd("co2content_inventory",r2,ft,"",DATA_NOW)/1000; // kilos to tons
1140 
1141  double expReturns_hartman = -10000000000; // need to initialize because we use it for a comparison
1142  double expReturns_timber_hartman; // no need to give an initial value because they are not used before being given a value
1143  double expReturns_carbon_hartman;
1144  int optDc_hartman = -1; // "optimal diameter class", the one on which the expected returns are computed
1145  for (uint u=1; u<dClasses.size(); u++){
1146  string dc = dClasses[u];
1147  double cumTp_u = px->cumTp_exp.at(j).at(u);
1148 
1149  // First we calculate expected revenues from carbon (they used to be inside the products loop, but they don't need to be since they don't depend on products to be sold)
1150  double raw_amount_carbon=0;
1151  double carbonPrice_y = carbonPrice_NOW; //carbon price is by default current price, ie it is assumed to be constant from owner's perspective
1152  for (int y=0; y<=cumTp_u; y++) { // compute yearly carbon payments
1153  double expectedVHa_y = getVHaByYear(px, j, y, futureExtraBiomass_ratio[j][y], regId);
1154  double carbonPayment_y;
1155  carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[y]);
1156 
1157  //r if (y==0) { // we use euclidian division, so an exception must be made when denominator is equal to 0
1158  //r carbonPayment_y = 0; // in the first year, no forest can reach a non-zero volume so payments are 0
1159  //r} else {
1160  //r int factor = y/cumTP_opt_floor; // euclidian division of the two lengths we compare
1161  //r if (cumTP_opt_floor%y >0) { // if the year is NOT a multiple of optimal rotation time, we need to take the year-th value of vector modulus how many rotations we have passed
1162  //r carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[y - factor*cumTP_opt_floor]);
1163  //r } else { // if the year is EXACTLY a multiple of optimal rotation time, we need to take the last value of vector, not the first one
1164  //r carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[cumTP_opt_floor]);
1165  //r }
1166  //r}
1167  raw_amount_carbon += carbonPayment_y*pow(1+ir,cumTp_u - y); // all payments compounded to end of rotation
1168  }
1169 
1170  // And now we compute revenues from timber (we try several products, as in the Faustmannian rotation)
1171  double vHa = px->vHa_exp.at(j).at(u);
1172  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
1173  for (uint pp=0;pp<priProducts.size();pp++){
1174  double pl = gpd("pl",regId,priProducts[pp]); // note that this is the OBSERVED price. If we call it at current year+cumTp_u we would have the expected price. But we would first have to compute it, as pw is weigthed price world-local and we don't have local price for the future. DONE 20141202 ;-)
1175  double worldCurPrice = gpd("pl",WL2,priProducts[pp]);
1176  double worldFutPrice = gpd("pl",WL2,priProducts[pp],thisYear+cumTp_u);
1177  double sl = gpd("sl",regId,priProducts[pp]);
1178  double sa = gpd("sa",regId,priProducts[pp]);
1179  double pw_exp = computeExpectedPrice(pl, worldCurPrice, worldFutPrice, sl, sa, px->expTypePrices); //20141030: added the expected price!
1180  double raw_amount = finalHarvestFlag*pw_exp*vHa*app(priProducts[pp],ft,dc); // B.U.G. 20121126, it was missing app(pp,ft,dc) !!
1181  double anualised_amount_timber = MD->calculateAnnualisedEquivalent(raw_amount,cumTp_u, ir);
1182 
1183  // Now we add the final carbon payment corresponding to carbon sequestered in long-lived products
1184  raw_amount_carbon += pickling * carbonPrice_y * finalHarvestFlag * vHa * app(priProducts[pp],ft,dc);
1185  // And we annualise carbon revenues and add them to annualised timber revenues
1186  double anualised_amount_carbon = MD->calculateAnnualisedEquivalent(raw_amount_carbon,cumTp_u, ir);
1187  //px->expectedReturnsCarbon = anualised_amount_carbon; TODO
1188 
1189  double anualised_amount = anualised_amount_timber + anualised_amount_carbon;
1190 
1191  // If the new expected revenues is higher than previously, we store the maximum value and corresponding dc.
1192  if (anualised_amount>=expReturns_hartman) {
1193  expReturns_hartman=anualised_amount;
1194  expReturns_carbon_hartman = anualised_amount_carbon;
1195  expReturns_timber_hartman = anualised_amount_timber;
1196  optDc_hartman = u;
1197  }
1198  } // end foreach primary product
1199  } // end foreach DC
1200 
1201  // Here we send results to pixel because they are final
1202  px->expectedAnnualIncome_timber.push_back(expReturns_timber_hartman);
1203  px->expectedAnnualIncome_carbon.push_back(expReturns_carbon_hartman);
1204  px->expectedReturnsNotCorrByRa.push_back(expReturns_hartman);
1205  px->optDc.push_back(optDc_hartman);
1206 
1207  // And with risk aversion
1208  if(MD->getBoolSetting("heterogeneousRiskAversion",DATA_NOW)){
1209  double ra = px->getDoubleValue("ra");
1210  double cumMort = 1-px->cumAlive_exp.at(j).at(optDc_hartman);
1211  //cout << px->getID() << "\t" << ft << "\t\t" << "optDc" << optDc << "\t" << cumMort << endl;
1212  double origExpReturns_hartman = expReturns_hartman;
1213  expReturns_hartman = origExpReturns_hartman * (1.0 - ra*cumMort);
1214  }
1215 
1216  px->expectedReturns.push_back(expReturns_hartman);
1217 
1218  // Now we replace results from the previous loop with the new ones
1219 
1220  expectedReturns.at(j) = expReturns_hartman;
1221  rotation_dc.at(j) = optDc_hartman;
1222 
1223 
1224  } // end foreach forest type
1225 
1226  } // end IF Hartman reference is BAU (no policy)----------------------------------------------------------------
1227 
1228  // End of added second carbon project---------------------------------------------------------------------------
1229 
1230  for(uint j=0;j<fTypes.size();j++){
1231  string ft = fTypes[j];
1232  forType* thisFt = MTHREAD->MD->getForType(ft);
1233 
1234  double harvestedAreaForThisFT = vSum(px->hArea.at(j))*(1-fArea_decr_rel); // gfd("harvestedArea",regId,ft,DIAM_ALL);
1235  deltaAreas[j][fTypes.size()] += vSum(px->hArea.at(j))*(fArea_decr_rel);
1236  vector<double> corrExpectedReturns(fTypes.size(),0.0); // corrected expected returns (considering transaction costs). These are in form of NPV
1237  vector<double> polBal_fiSub_unitvalue(fTypes.size(),0.0); // subside (tax) per ha per destination ft €/ha
1238 
1239  // C - Computing the corrected expected returns including transaction costs
1240 
1241  for(uint j2=0;j2<fTypes.size();j2++){
1242  string ft2 = fTypes[j2];
1243  double invTransCost = (gfd("invTransCost",regId,ft,ft2,DATA_NOW) * gfd("pol_fiSub",regId,ft,ft2,DATA_NOW) ) ;
1244  polBal_fiSub_unitvalue[j2] = gfd("invTransCost",regId,ft,ft2,DATA_NOW) * (gfd("pol_fiSub",regId,ft,ft2,DATA_NOW) - 1) ;
1245  corrExpectedReturns[j2] = (expectedReturns[j2]/ir)-invTransCost; // changed 20150718: npv = eai/ir + tr. cost // HUGE BUG 20151202: transaction costs should be REDUCED, not added to the npv...
1246  }
1247 
1248  //int highestReturnFtIndex = getMaxPos(corrExpectedReturns);
1249 
1250  // D - Assigning the Managed area
1251  // calculating freeArea at the end of the year and choosing the new regeneration area..
1252  //freeArea(i,essence,lambda) = sum(u, hv2fa(i,essence,lambda,u)*hr(u,i,essence,lambda,t)*V(u,i,lambda,essence,t-1)*100);
1253  //if(scen("endVreg") ,
1254  // regArea(i,essence,lambda,t) = freeArea(i,essence, lambda); // here we could introduce in/out area from other land usages
1255  //else
1256  // loop (i,
1257  // loop( (essence,lambda),
1258  // if ( expReturns(i,essence,lambda) = smax( (essence2,lambda2),expReturns(i,essence2,lambda2) ),
1259  // regArea (i,essence,lambda,t) = sum( (essence2, lambda2), freeArea(i,essence2, lambda2) ) * mr;
1260  // );
1261  // );
1262  // regArea(i,essence,lambda,t) = freeArea(i,essence, lambda)*(1-mr); // here we could introduce in/out area from other land usages
1263  // );
1264  //if (j==highestReturnFtIndex){
1265  // thisYearRegAreas[j] += totalHarvestedArea*mr;
1266  //}
1267  // If I Implement this I'll have a minimal diff in total area.. why ?????
1268 
1269  int optFtChosen = getMaxPos(corrExpectedReturns);
1270  px->optFtChosen.push_back(optFtChosen);
1271  px->optDcChosen.push_back(px->optDc.at(optFtChosen));
1272 
1273  thisYearRegAreas[getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr;
1274  thisYearRegAreas[getMaxPos(expectedReturns)] += fArea_incr*mr/((double)fTypes.size()); // mr quota of new forest area assigned to highest expected returns ft (not considering transaction costs). Done for each forest types
1275 
1276  deltaAreas[j][getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr;
1277  deltaAreas[fTypes.size()][getMaxPos(expectedReturns)] += fArea_incr*mr/((double)fTypes.size());
1278 
1279  polBal_fiSub[getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr*polBal_fiSub_unitvalue[getMaxPos(corrExpectedReturns)];
1280 
1281  // E - Assigning unmanaged area
1282  //for(uint j2=0;j2<fTypes.size();j2++){
1283  if(natRegAllocation=="pp"){ // according to prob presence
1284  //string ft2 = fTypes[j2];
1285  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1286  double freq = rescaleFrequencies ? gfd("freq_norm",regId,parentFt,""):gfd("freq",regId,parentFt,""); // "probability of presence" for unmanaged forest, added 20140318
1287  double hAreaThisFtGroup = findMap(hAreaByFTypeGroup,parentFt);
1288  double hRatio = 1.0;
1289  if(hAreaThisFtGroup>0){
1290  //double harvestedAreaForThisFT2 = vSum(px->hArea.at(j2));
1291  hRatio = harvestedAreaForThisFT/hAreaThisFtGroup;
1292  } else {
1293  int nFtChilds = MTHREAD->MD->getNForTypesChilds(parentFt);
1294  hRatio = 1.0/nFtChilds;
1295  }
1296  thisYearRegAreas[j] += totalHarvestedArea*(1-mr)*freq*hRatio;
1297  thisYearRegAreas[j] += fArea_incr*(1-mr)*freq*hRatio; // non-managed quota of new forest area assigning proportionally on pp at sp group level
1298  //thisYearRegAreas[j2] += harvestedAreaForThisFT*(1-mr)*freq*hRatio;
1299  // Here is the problem with building a transaction matrix ft_from/ft_to: we don't explicitly assign from/to for unmanaged areas !
1300  for(uint j2=0;j2<fTypes.size();j2++){
1301  deltaAreas[j2][j] += vSum(px->hArea.at(j2))*(1-fArea_decr_rel)*(1-mr)*freq*hRatio;
1302  }
1303  deltaAreas[fTypes.size()][j] += fArea_incr*(1-mr)*freq*hRatio;
1304 
1305 
1306 
1307  } else { // prob presence not used..
1308 
1309  // Accounting for mortality arising from pathogens. Assigning the area to siblings according to area..
1310  // TODO: chek that mortRatePath doesn't involve adding forest area to deltaAreas[j][""]
1311 
1312  double mortRatePath = px->getPathMortality(ft, "0");
1313  if(mortRatePath > 0){
1314 
1315  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1316  vector <string> siblings = MTHREAD->MD->getForTypeChilds(parentFt);
1317  vector <double> siblingAreas;
1318  for(uint j2=0;j2<siblings.size();j2++){
1319  if(siblings[j2]==ft){
1320  siblingAreas.push_back(0.0);
1321  } else {
1322  //string debug_sibling_ft = siblings[j2];
1323  //int debug_positin = getPos(debug_sibling_ft,fTypes);
1324  double thisSiblingArea = vSum(px->area.at(getPos(siblings[j2],fTypes)));
1325  siblingAreas.push_back(thisSiblingArea);
1326  }
1327  }
1328  double areaAllSiblings = vSum(siblingAreas);
1329  thisYearRegAreas[j] += harvestedAreaForThisFT*(1-mr)*(1-mortRatePath);
1330  deltaAreas[j][j] += harvestedAreaForThisFT*(1-mr)*(1-mortRatePath);
1331 
1332  if(areaAllSiblings>0.0){ // area of siblings is >0: we attribute the area from the pathogen induced mortality to the siblings proportionally to area..
1333  for(uint j2=0;j2<siblings.size();j2++){
1334 // int debug1 = getPos(siblings[j2],fTypes);
1335 // double debug2= harvestedAreaForThisFT;
1336 // double debug3 = 1.0-mr;
1337 // double debug4 = mortRatePath;
1338 // double debug5 = siblingAreas[j2];
1339 // double debug6 = areaAllSiblings;
1340 // double debug7 = harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1341  thisYearRegAreas[getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1342  deltaAreas[j][getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1343  }
1344  } else if (siblings.size()>1) { // area of all siblings is 0, we just give them the mortality area in equal parts..
1345  for(uint j2=0;j2<siblings.size();j2++){
1346  if (siblings[j2] != ft){
1347  thisYearRegAreas[getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)* 1.0 / (( (float) siblings.size())-1.0);
1348  deltaAreas[j][getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)* 1.0 / (( (float) siblings.size())-1.0);
1349  }
1350  }
1351  }
1352  } else { // mortRatePath == 0
1353  thisYearRegAreas[j] += harvestedAreaForThisFT*(1.0-mr);
1354  deltaAreas[j][j] += harvestedAreaForThisFT*(1.0-mr);
1355  }
1356 
1357  // Allocating non-managed quota of new forest area to ft proportionally to the current area share by ft
1358  double newAreaThisFt = vSum(px->area) ? fArea_incr*(1-mr)*vSum(px->area.at(j))/vSum(px->area): 0.0;
1359  thisYearRegAreas[j] += newAreaThisFt;
1360  deltaAreas[fTypes.size()][j] += newAreaThisFt;
1361 
1362  if(! (thisYearRegAreas[j] >= 0.0) ){
1363  msgOut(MSG_ERROR,"thisYearRegAreas[j] is not non negative (j: "+i2s(j)+", thisYearRegAreas[j]: "+i2s( thisYearRegAreas[j])+").");
1364  }
1365  //thisYearRegAreas[j2] += harvestedAreaForThisFT*(1-mr);
1366  }
1367  //}
1368  } // end for each forest type
1369 
1370  // adding regeneration area to the fist (00) diameter class
1371  for(uint j=0;j<fTypes.size();j++){
1372  px->area.at(j).at(0) += thisYearRegAreas.at(j);
1373  }
1374 
1375  #ifdef QT_DEBUG
1376  double totalRegArea = vSum(thisYearRegAreas);
1377  if (! (totalRegArea==0.0 && totalHarvestedArea==0.0)){
1378  double ratio = totalRegArea / totalHarvestedArea ;
1379  if(rescaleFrequencies && (ratio < 0.99999999999 || ratio > 1.00000000001) && forestAreaChangeMethod == "fixed") {
1380  msgOut(MSG_CRITICAL_ERROR, "Sum of regeneration areas not equal to sum of harvested area in runManagementModel()!");
1381  }
1382  }
1383  #endif
1384 
1385  px->regArea.insert(pair <int, vector<double> > (MTHREAD->SCD->getYear(), thisYearRegAreas));
1386  px->deltaArea = deltaAreas;
1387 
1388  } // end of each pixel
1389  if (-fArea_diff > regHArea){
1390  msgOut(MSG_WARNING,"In region "+ i2s(regId) + " the exogenous area decrement ("+ d2s(-fArea_diff) +" ha) is higger than the harvesting ("+ d2s(regHArea) +" ha). Ratio forced to 1.");
1391  }
1392 
1393  for(uint j=0;j<fTypes.size();j++){
1394  double debug = polBal_fiSub[j]/1000000;
1395  sfd((polBal_fiSub[j]/1000000.0),"polBal_fiSub",regId,fTypes[j],"",DATA_NOW,true); //M€
1396 
1397  }
1398  } // end of each region
1399 }
1400 
1401 void
1403  msgOut(MSG_INFO, "Cashing initial model settings..");
1404  MD = MTHREAD->MD;
1405  firstYear = MD->getIntSetting("initialYear");
1406  secondYear = firstYear+1;
1407  thirdYear = firstYear+2;
1408  WL2 = MD->getIntSetting("worldCodeLev2");
1409  regIds2 = MD->getRegionIds(2);
1410  priProducts = MD->getStringVectorSetting("priProducts");
1411  secProducts = MD->getStringVectorSetting("secProducts");
1413  allProducts.insert( allProducts.end(), secProducts.begin(), secProducts.end() );
1414  dClasses = MD->getStringVectorSetting("dClasses");
1415  pDClasses; // production diameter classes: exclude the fist diameter class below 15 cm
1416  pDClasses.insert(pDClasses.end(), dClasses.begin()+1, dClasses.end() );
1417  fTypes= MD->getForTypeIds();
1418  l2r = MD->getRegionIds();
1419 }
1420 
1421 void
1423  // Settings needs explicitly DATA_NOW to have a temporal dymension..
1424  msgOut(MSG_INFO, "Cashing model settings that may change every year..");
1425  regType = MTHREAD->MD->getStringSetting("regType",DATA_NOW); // how the regeneration should be computed (exogenous, from hr, from allocation choises)
1426  natRegAllocation = MTHREAD->MD->getStringSetting("natRegAllocation",DATA_NOW); // how to allocate natural regeneration
1427  rescaleFrequencies = MD->getBoolSetting("rescaleFrequencies",DATA_NOW);
1428  oldVol2AreaMethod = MD->getBoolSetting("oldVol2AreaMethod",DATA_NOW);
1429  //mr = MD->getDoubleSetting("mr");
1430  forestAreaChangeMethod = MTHREAD->MD->getStringSetting("forestAreaChangeMethod",DATA_NOW);
1431  ir = MD->getDoubleSetting("ir",DATA_NOW);
1432 }
1433 
1434 void
1436  msgOut(MSG_INFO, "Starting initializing pixel-level values");
1437 
1438  // pxVol = regVol * pxArea/regForArea
1439  // this function can be done only at the beginning of the model, as it assume that the distribution of volumes by diameter class in the pixels within a certain region is homogeneous, but as the model progress along the time dimension this is no longer true.
1440  if(!MD->getBoolSetting("usePixelData")) return;
1441  for(uint i=0;i<regIds2.size();i++){
1442  ModelRegion* reg = MD->getRegion(regIds2[i]);
1443  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
1444  for (uint j=0;j<rpx.size();j++){
1445  //int debugPx = rpx[j]->getID();
1446  //int debug2 = debugPx;
1447  rpx[j]->vol.clear(); // not actually necessary
1448  for(uint y=0;y<fTypes.size();y++){
1449  vector <double> vol_byu;
1450  double regForArea = reg->getValue("forArea_"+fTypes[y]);
1451  for (uint z=0;z<dClasses.size();z++){
1452  double regVol;
1453  regVol = z ? gfd("vol",regIds2[i],fTypes[y],dClasses[z],firstYear) : 0 ; // if z=0-> regVol= gfd(), otherwise regVol=0;
1454  double pxArea = rpx[j]->getDoubleValue("forArea_"+fTypes[y], true); // bug solved 20121109. get zero for not data
1455  if (pxArea<0.0){
1456  msgOut(MSG_CRITICAL_ERROR,"Error in initializePixelVolumes, negative pxArea!");
1457  }
1458  double pxVol = regForArea ? regVol * pxArea/regForArea: 0; // if we introduce new forest types without initial area we must avoid a 0/0 division
1459  //rpx[j]->changeValue(pxVol,"vol",fTypes[y],dClasses[z],firstYear);
1460  vol_byu.push_back(pxVol);
1461  }
1462  rpx[j]->vol.push_back(vol_byu);
1463  }
1464  }
1465  }
1467 }
1468 
1469 /**
1470  * @brief ModelCoreSpatial::assignSpMultiplierPropToVols assigns the spatial multiplier (used in the time of return) based no more on a Normal distribution but on the volumes present in the pixel: more volume, more the pixel is fit for the ft
1471  *
1472  * This function applies to the pixel a multiplier of the time of passage that is inversely proportional to the volumes of that forest type present in the pixel.
1473  * The idea is that the spots where we observe more of a given forest type are probably the ones most suited to it.
1474  *
1475  * The overall multipliers **of time of passage** (that is, the one returned by Pixel::getMultiplier("tp_multiplier") ) will then be the product of this multiplier, which accounts for spatial heterogeneity, and of a possible exogenous
1476  * multiplier that accounts for different scenarios among the spatio-temporal dimensions.
1477  *
1478  * Given that (forest type index omitted):
1479  * - \f$V_{p}\f$ = volume of a given ft in each pixel (p)
1480  * - \f$\bar{g}\f$ and \f$\sigma_{g}\f$ = regional average and standard deviation of the growth rate
1481  * - \f$m_{p}\f$ = multiplier of time of passage
1482  *
1483  * This multiplier is computed as:
1484  * - \f$ v_{p} = max(V) - V_{p}~~ \f$ A diff from the max volume is computed in each pixel
1485  * - \f$ vr_{p} = v_{p} * \bar{g}/\bar{v}~~ \f$ The volume diff is rescaled to match the regional growth rate
1486  * - \f$ vrd_{p} = vr_{p} - \bar{vr}~~ \f$ Deviation of the rescaled volumes are computed
1487  * - \f$ vrdr_{p} = vrd_{p} * \sigma_{g}/\sigma_{vr}~~ \f$ The deviations are then rescaled to match the standard deviations of the regional growth rate
1488  * - \f$ m_{p} = (vrdr_{p} + \bar{vr}) / \bar{g}~~ \f$ The multiplier is computed from the ratio of the average rescaled volumes plus rescaled deviation over the average growth rate.
1489  *
1490  * And it has the following properties:
1491  * - \f$\bar{m} = 1\f$
1492  * - \f$\sigma_{m} = cv_{g}\f$
1493  * - \f$m_{p} = V_{p}*\alpha+\beta\f$
1494  * - \f$m_{\bar{V}} = 1\f$
1495  *
1496  * For spreadsheet "proof" see the file computation_of_growth_multipliers_from_know_avg_sd_and_proportional_to_share_of_area_in_each_pixel.ods
1497  */
1498 void
1500 
1501  if(!MTHREAD->MD->getBoolSetting("useSpatialVarPropToVol")){return;}
1502  for(uint r=0;r<regIds2.size();r++){
1503  int rId = regIds2[r];
1504  ModelRegion* reg = MD->getRegion(regIds2[r]);
1505  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[r]);
1506  for(uint f=0;f<fTypes.size();f++){
1507  string ft = fTypes[f];
1508  double agr = gfd("agr",regIds2[r],ft,"");
1509  double sStDev = gfd("sStDev",regIds2[r],ft,"");
1510  vector<double> vols;
1511  vector<double> diffVols;
1512  vector<double> diffVols_rescaled;
1513  double diffVols_rescaled_deviation;
1514  double diffVols_rescaled_deviation_rescaled;
1515  double final_value;
1516  double multiplier;
1517  vector<double> multipliers; // for tests
1518 
1519  double vol_max, rescale_ratio_avg, rescale_ratio_sd;
1520  double diffVols_avg, diffVols_rescaled_avg;
1521  double diffVols_rescaled_sd;
1522 
1523  for (uint p=0;p<rpx.size();p++){
1524  Pixel* px = rpx[p];
1525  vols.push_back(vSum(px->vol[f]));
1526  } // end for each pixel
1527  vol_max=getMax(vols);
1528 
1529  for(uint p=0;p<vols.size();p++){
1530  diffVols.push_back(vol_max-vols[p]);
1531  }
1532 
1533  diffVols_avg = getAvg(diffVols);
1534  rescale_ratio_avg = (diffVols_avg != 0.0) ? agr/diffVols_avg : 1.0;
1535  for(uint p=0;p<diffVols.size();p++){
1536  diffVols_rescaled.push_back(diffVols[p]*rescale_ratio_avg);
1537  }
1538  diffVols_rescaled_avg = getAvg(diffVols_rescaled);
1539  diffVols_rescaled_sd = getSd(diffVols_rescaled,false);
1540 
1541  rescale_ratio_sd = (diffVols_rescaled_sd != 0.0) ? sStDev/diffVols_rescaled_sd : 1.0;
1542  for(uint p=0;p<diffVols_rescaled.size();p++){
1543  diffVols_rescaled_deviation = diffVols_rescaled[p] - diffVols_rescaled_avg;
1544  diffVols_rescaled_deviation_rescaled = diffVols_rescaled_deviation * rescale_ratio_sd;
1545  final_value = diffVols_rescaled_avg + diffVols_rescaled_deviation_rescaled;
1546  multiplier = (agr != 0.0) ? min(1.6, max(0.4,final_value/agr)) : 1.0; //20151130: added bounds for extreme cases. Same bonds as in Gis::applySpatialStochasticValues()
1547  // multiplier = 1.0;
1548 
1549  Pixel* px = rpx[p];
1550  px->setSpModifier(multiplier,f);
1551  multipliers.push_back(multiplier);
1552  }
1553 
1554  #ifdef QT_DEBUG
1555  // Check relaxed as we introduced bounds that may change slighly the avg and sd...
1556  double avgMultipliers = getAvg(multipliers);
1557  double sdMultipliers = getSd(multipliers,false);
1558  if ( avgMultipliers < 0.9 || avgMultipliers > 1.1){
1559  msgOut(MSG_CRITICAL_ERROR, "The average of multipliers of ft "+ ft +" for the region " + i2s(rId) + " is not 1!");
1560  }
1561  if ( ( sdMultipliers - (sStDev/agr) ) < -0.5 || ( sdMultipliers - (sStDev/agr) ) > 0.5 ){
1562  double cv = sStDev/agr;
1563  msgOut(MSG_CRITICAL_ERROR, "The sd of multipliers of ft "+ ft +" for the region " + i2s(rId) + " is not equal to the spatial cv for the region!");
1564  }
1565  #endif
1566  } // end for each ft
1567  } // end for each region
1568 }
1569 
1570 
1571 
1572 void
// NOTE(review): the function signature line (original source line 1573) is missing from
// this extraction; per the trailing doc-comment below this is the carbon-module
// initialisation routine — confirm the exact name against ModelCoreSpatial.h.
1574 
1575  ///< call initialiseDeathBiomassStocks(), initialiseProductsStocks() and initialiseEmissionCounters()
// NOTE(review): a statement (original source line 1576) appears to be missing here —
// presumably the initialiseEmissionCounters() call named in the comment above. TODO confirm.
1577 
// For each level-2 region: build the per-forest-type death biomass vector and the
// per-product quantity vector, and hand both to the carbon balance module (CBAL).
1578  for(uint i=0;i<regIds2.size();i++){
1579  vector<double> deathBiomass;
1580  for(uint j=0;j<fTypes.size();j++){
// Death biomass for this ft = "vMort" + "vMortAdd" regional data (all diameter classes, current year)
1581  double deathBiomass_ft = gfd("vMort",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW)+gfd("vMortAdd",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
1582  deathBiomass.push_back(deathBiomass_ft);
1583  }
1584  MTHREAD->CBAL->initialiseDeathBiomassStocks(deathBiomass, regIds2[i]);
// Product stocks: primary products enter with their exports ("sa"), secondary
// products with the regional domestic consumption ("dl").
1585  vector<double>qProducts;
1586  for(int p=0;p<priProducts.size();p++){
1587  // for the primary products we consider only the exports as the domestic consumption is entirelly transformed in secondary products
1588  double int_exports = gpd("sa",regIds2[i],priProducts[p],DATA_NOW);
1589  qProducts.push_back(int_exports);
1590  }
1591  for(int p=0;p<secProducts.size();p++){
1592  // for the tranformed product we skip those that are imported, hence derived from other forest systems
1593  double consumption = gpd("dl",regIds2[i],secProducts[p],DATA_NOW); // dl = sl + net regional imports
1594  qProducts.push_back(consumption);
1595  }
1596  MTHREAD->CBAL->initialiseProductsStocks(qProducts, regIds2[i]);
1597 
1598  }
1599 }
1600 
1601 void
// NOTE(review): the function signature line (original source line 1602) is missing from
// this extraction — confirm the function name against ModelCoreSpatial.h.
// The loop walks the last 30 years for every (region, forest type, diameter class)
// combination, constructing an iisskey (year, region, ft, dc) composite key each time.
1603  int currentYear = MTHREAD->SCD->getYear();
1604  for(int y=currentYear;y>currentYear-30;y--){
1605  for(uint i=0;i<regIds2.size();i++){
1606  for(uint j=0;j<fTypes.size();j++){
1607  for (uint u=0;u<dClasses.size();u++){
1608  iisskey key(y,regIds2[i],fTypes[j],dClasses[u]);
// NOTE(review): a statement (original source line 1609) is missing here — as shown,
// `key` is constructed but never used, so the original presumably inserted it into a
// carbon-module container (e.g. an initial zero entry). TODO recover from upstream source.
1610  }
1611  }
1612  }
1613  }
1614 }
1615 
1616 /**
1617  * @brief ModelCoreSpatial::initializePixelArea
1618  *
1619  * This function compute the initial area by ft and dc. It requires vHa computed in computeCumulativeData, this is why it is
1620  * separated from the other initialisedPixelValues().
1621  * As the sum of area computed using vHa may differ from the one memorised in forArea_* layer, all values are scaled to match
1622  * it before being memorised.
1623  * Also assign area = area_l
1624  */
1625 
1626 void
1628  msgOut(MSG_INFO, "Starting initializing pixel-level area");
1629  if(!MD->getBoolSetting("usePixelData")) return;
1630  for(uint i=0;i<regIds2.size();i++){
1631  ModelRegion* reg = MD->getRegion(regIds2[i]);
1632  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
1633  for (uint p=0;p<rpx.size();p++){
1634  Pixel* px = rpx[p];
1635  double pxid= px->getID();
1636  for(uint j=0;j<fTypes.size();j++){
1637  string ft = fTypes[j];
1638  vector <double> tempAreas;
1639  vector <double> areasByFt;
1640  double pxArea = px->getDoubleValue("forArea_"+ft,true)/10000.0; //ha
1641  for (uint u=0;u<dClasses.size();u++){
1642  if(u==0){
1643  double regionArea = reg->getValue("forArea_"+ft,OP_SUM)/10000.0; //ha
1644  double regRegVolumes = gfd("vReg",regIds2[i],ft,""); // regional regeneration volumes.. ugly name !!
1645  double newVReg = regionArea ? regRegVolumes*pxArea/regionArea : 0.0;
1646  double tp_u0 = px->tp.at(j).at(0); // time of passage to reach the first production diameter class
1647  double entryVolHa = gfd("entryVolHa",regIds2[i],ft,"");
1648  double tempArea = (newVReg*1000000.0/entryVolHa)*tp_u0;
1649  tempAreas.push_back(tempArea);
1650  } else {
1651  string dc = dClasses[u];
1652  double dcVol = px->vol_l.at(j).at(u)*1000000.0; // m^3
1653  double dcVHa = px->vHa.at(j).at(u); // m^3/ha
1654  #ifdef QT_DEBUG
1655  if(dcVol < 0.0 || dcVHa < 0.0){
1656  msgOut(MSG_CRITICAL_ERROR, "Negative volumes or density in initializePixelArea");
1657  }
1658  #endif
1659  double tempArea = dcVHa?dcVol/dcVHa:0;
1660  tempAreas.push_back(tempArea);
1661  }
1662 
1663  } // end dc
1664  double sumTempArea = vSum(tempAreas);
1665  // double sharedc0 = 5.0/90.0; // an arbitrary share of total area allocated to first diameter class
1666  //tempAreas.at(0) = sumTempArea * sharedc0;
1667  //sumTempArea = vSum(tempAreas);
1668  double normCoef = sumTempArea?pxArea/ sumTempArea:0;
1669  //cout << i << '\t' << pxid << '\t' << ft << '\t' << normCoef << endl;
1670  #ifdef QT_DEBUG
1671  if(normCoef < 0.0){
1672  msgOut(MSG_CRITICAL_ERROR, "Negative normCoef in initializePixelArea");
1673  }
1674  #endif
1675  for (uint u=0;u<dClasses.size();u++){
1676  areasByFt.push_back(tempAreas.at(u)*normCoef); //manca la costruzione originale del vettore
1677  }
1678  #ifdef QT_DEBUG
1679  if (pxArea != 0.0){
1680  double ratio = vSum(areasByFt)/ pxArea; // vSum(areasByFt) should be equal to pxArea
1681  if(ratio < 0.99999999999 || ratio > 1.00000000001) {
1682  msgOut(MSG_CRITICAL_ERROR, "pxArea is not equal to vSum(areasByFt) in initializePixelArea");
1683  }
1684  }
1685  #endif
1686  px->area_l.push_back(areasByFt);
1687  /// \todo here I have finally also area_ft_dc_px and I can implement the new one I am in 2006
1688  } // end ft
1689  px->area = px->area_l; //Assigning initial value of area to the area of the old year
1690  } // end px
1691  } // end region
1692  loadExogenousForestLayers("area");
1693  /// \todo: also update area_l
1694 }
1695 
1696 void
1698 
1699  msgOut(MSG_INFO, "Starting computing some cumulative values..");
1700  int thisYear = MTHREAD->SCD->getYear();
1701 
1702 // double sumCumTP=0;
1703 // double sumVHa = 0;
1704 // double count = 0;
1705 // double avg_sumCumTp;
1706 // double avg_sumVHa;
1707 
1708  for(uint r2= 0; r2<regIds2.size();r2++){
1709  int regId = regIds2[r2];
1710  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1711 
1712  for (uint p=0;p<regPx.size();p++){
1713  Pixel* px = regPx[p];
1714  px->cumTp.clear();
1715  px->cumTp_exp.clear();
1716  px->vHa_exp.clear();
1717  px->vHa.clear();
1718  px->cumAlive.clear();
1719  px->cumAlive_exp.clear();
1720  double expType = px->expType;
1721 
1722  for(uint j=0;j<fTypes.size();j++){
1723  string ft = fTypes[j];
1724 
1725  double tp_multiplier_now = px->getMultiplier("tp_multiplier",ft,DATA_NOW);
1726  double tp_multiplier_t0 = px->getMultiplier("tp_multiplier",ft,firstYear);
1727  double mortCoef_multiplier_now = px->getMultiplier("mortCoef_multiplier",ft,DATA_NOW);
1728  double mortCoef_multiplier_t0 = px->getMultiplier("mortCoef_multiplier",ft,firstYear);
1729  double betaCoef_multiplier_now = px->getMultiplier("betaCoef_multiplier",ft,DATA_NOW);
1730  double betaCoef_multiplier_t0 = px->getMultiplier("betaCoef_multiplier",ft,firstYear);
1731  double pathMort_now, pathMort_t0;
1732 
1733  // calculating the cumulative time of passage and the (cumulativelly generated) vHa for each diameter class (depending on forest owners diam growth expectations)
1734  //loop(u$(ord(u)=1),
1735  // cumTp(u,i,lambda,essence) = tp_u1(i,essence,lambda);
1736  //);
1737  //loop(u$(ord(u)>1),
1738  // cumTp(u,i,lambda,essence) = cumTp(u-1,i,lambda,essence)+tp(u-1,i,lambda,essence);
1739  //);
1740  ////ceil(x) DNLP returns the smallest integer number greater than or equal to x
1741  //loop( (u,i,lambda,essence),
1742  // cumTp(u,i,lambda,essence) = ceil(cumTp(u,i,lambda,essence));
1743  //);
1744  vector <double> cumTp_temp; // cumulative time of passage to REACH a diameter class (tp is to LEAVE to the next one)
1745  vector <double> vHa_temp; // volume at hectar by each diameter class [m^3/ha]
1746  vector <double> cumAlive_temp; // cumulated alive rate to reach a given diameter class
1747  vector <double> cumTp_exp_temp; // expected version of cumTp_temp
1748  vector <double> vHa_exp_temp; // expected version of vHa_temp
1749  vector <double> cumAlive_exp_temp; // "expected" version of cumMort
1750 
1751  MD->setErrorLevel(MSG_NO_MSG); // as otherwise on 2007 otherwise sfd() will complain that is filling multiple years (2006 and 2007)
1752  for (uint u=0; u<dClasses.size(); u++){
1753  string dc = dClasses[u];
1754  double cumTp_u, cumTp_u_exp, cumTp_u_noExp, cumTp_u_fullExp;
1755  double tp, tp_exp, tp_noExp, tp_fullExp;
1756  double vHa_u, vHa_u_exp, vHa_u_noExp, vHa_u_fullExp, beta, beta_exp, beta_noExp, beta_fullExp, mort, mort_exp, mort_noExp, mort_fullExp;
1757  double cumAlive_u, cumAlive_exp_u;
1758  pathMort_now = px->getPathMortality(ft,dc,DATA_NOW);
1759  pathMort_t0 = px->getPathMortality(ft,dc,firstYear);
1760  double additionalMortality_multiplier_reg_ft_dc = gfd("additionalMortality_multiplier",regId,ft,dc, DATA_NOW);
1761  double additionalMortality_multiplier_reg_ft_dc_t0 = gfd("additionalMortality_multiplier",regId,ft,dc, firstYear);
1762  double additionalMortality_now = px->getSTData("additionalMortality", ft, DATA_NOW, dc, 0.0)*additionalMortality_multiplier_reg_ft_dc;
1763  double additionalMortality_t0 = px->getSTData("additionalMortality", ft, firstYear, dc, 0.0)*additionalMortality_multiplier_reg_ft_dc_t0;
1764 
1765  // only cumTp is depending for the expectations, as it is what it is used by owner to calculate return of investments.
1766  // the tp, beta and mort coefficients instead are the "real" ones as predicted by scientist for that specific time
1767 
1768  if(u==0) {
1769  // first diameter class.. expected and real values are the same (0)
1770  cumTp_u = 0.;
1771  vHa_u = 0.;
1772  cumAlive_u = 1.;
1773  cumTp_temp.push_back(cumTp_u);
1774  vHa_temp.push_back(vHa_u);
1775  cumTp_exp_temp.push_back(cumTp_u);
1776  vHa_exp_temp.push_back(vHa_u);
1777  cumAlive_temp.push_back(cumAlive_u);
1778  cumAlive_exp_temp.push_back(cumAlive_u);
1779  } else {
1780  // other diameter classes.. first dealing with real values and then with expected ones..
1781  // real values..
1782  // real values..
1783  tp = gfd("tp",regId,ft,dClasses[u-1],thisYear)*tp_multiplier_now;
1784  cumTp_u = cumTp_temp[u-1] + tp;
1785  if (u==1){
1786  /**
1787  Note on the effect of mortality modifiers on the entryVolHa.
1788  Unfortunatly for how it is defined the mortality multiplier (the ratio with the new mortality rate over the old one) we can't
1789  compute a entryVolHa based on it. It is NOT infact just like: vHa_adjusted = vHa_orig / mort_multiplier.
1790  The effect of mortality on the vHa of the first diameter class is unknow, and so we can't compute the effect of a relative
1791  increase.
1792  */
1793  vHa_u = gfd("entryVolHa",regId,ft,"",thisYear);
1794  mort = 0.; // not info about mortality first diameter class ("00")
1795  } else {
1796  mort = 1-pow(1-min(gfd("mortCoef",regId,ft,dClasses[u-1],thisYear)*mortCoef_multiplier_now+pathMort_now+additionalMortality_now,1.0),tp); // mortality of the previous diameter class
1797  beta = gfd("betaCoef",regId,ft,dc, thisYear)*betaCoef_multiplier_now;
1798  vHa_u = vHa_temp[u-1]*beta*(1-mort);
1799  }
1800  cumAlive_u = max(0.,cumAlive_temp[u-1]*(1-mort));
1801  cumAlive_temp.push_back(cumAlive_u);
1802  cumTp_temp.push_back(cumTp_u);
1803  vHa_temp.push_back(vHa_u);
1804  // expected values..
1805  /**
1806  param expType Specify how the forest owners (those that make the investments) behave will be the time of passage in the future in order to calculate the cumulative time of passage in turn used to discount future revenues.
1807  Will forest owners behave adaptively believing the time of passage between diameter classes will be like the observed one at time they make decision (0) or they will have full expectations believing forecasts (1) or something in the middle ?
1808  For compatibility with the GAMS code, a -1 value means using initial simulation tp values (fixed cumTp)."
1809  */
1810  if (expType == -1){
1811  tp_exp = gfd("tp",regId,ft,dClasses[u-1],firstYear)*tp_multiplier_t0;
1812  //tp = px->tp.at(u); no. not possible, tp stored at pixel level is the current year one
1813  cumTp_u_exp = cumTp_exp_temp[u-1]+tp_exp;
1814  cumTp_exp_temp.push_back(cumTp_u_exp);
1815  if(u==1) {
1816  vHa_u_exp = gfd("entryVolHa",regId,ft,"",firstYear);
1817  mort_exp = 0.; // not info about mortality first diameter class ("00")
1818  } else {
1819  mort_exp = 1-pow(1-min(gfd("mortCoef",regId,ft,dClasses[u-1], firstYear)*mortCoef_multiplier_t0+pathMort_t0+additionalMortality_t0,1.0),tp_exp); // mortality rate of previous diameter class
1820  beta_exp = gfd("betaCoef",regId,ft,dc, firstYear)*betaCoef_multiplier_t0;
1821  vHa_u_exp = vHa_exp_temp[u-1]*beta_exp*(1-mort_exp);
1822  }
1823  } else {
1824  double tp_multiplier_dynamic = px->getMultiplier("tp_multiplier",ft,thisYear+ceil(cumTp_exp_temp[u-1]));
1825  tp_noExp = gfd("tp",regId,ft,dClasses[u-1])*tp_multiplier_now;
1826  cumTp_u_noExp = cumTp_exp_temp[u-1]+tp_noExp;
1827  tp_fullExp = gfd("tp",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]))*tp_multiplier_dynamic ; // time of passage that there should be to reach this diameter class in the year where the previous diameter class will be reached
1828  cumTp_u_fullExp = cumTp_exp_temp[u-1]+tp_fullExp ; // it adds to the time of passage to reach the previous diameter class the time of passage that there should be to reach this diameter class in the year where the previous diameter class will be reached
1829  cumTp_u_exp = cumTp_u_fullExp*expType+cumTp_u_noExp*(1-expType); // 20121108: it's math the same as cumTp_exp_temp[u-1] + tp
1830  cumTp_exp_temp.push_back(cumTp_u_exp);
1831  if(u==1) {
1832  vHa_u_noExp = gfd("entryVolHa",regId,ft,"",DATA_NOW);
1833  vHa_u_fullExp = gfd("entryVolHa",regId,ft,"",thisYear+ceil(cumTp_u));
1834  vHa_u_exp = vHa_u_fullExp*expType+vHa_u_noExp*(1-expType);
1835  mort_exp = 0.; // not info about mortality first diameter class ("00")
1836  } else {
1837  mort_noExp = 1-pow(1-min(1.0,gfd("mortCoef",regId,ft,dClasses[u-1], DATA_NOW)*mortCoef_multiplier_now+pathMort_now+additionalMortality_now), tp_noExp); // mortCoef is a yearly value. Mort coeff between class is 1-(1-mortCoeff)^tp
1838  double mortCoef_multiplier_dynamic = px->getMultiplier("mortCoef_multiplier",ft,thisYear+ceil(cumTp_exp_temp[u-1]));
1839  double pathMort_dynamic = px->getPathMortality(ft,dc,thisYear+ceil(cumTp_exp_temp[u-1]));
1840  double additionalMortality_multiplier_reg_ft_dc_dynamic = gfd("additionalMortality_multiplier",regId,ft,dc, thisYear+ceil(cumTp_exp_temp[u-1]));
1841  double additionalMortality_dynamic = px->getSTData("additionalMortality", ft, thisYear+ceil(cumTp_exp_temp[u-1]), dc, 0.0)*additionalMortality_multiplier_reg_ft_dc_dynamic;
1842  mort_fullExp = 1-pow(1-min(1.0,gfd("mortCoef",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]))*mortCoef_multiplier_dynamic+pathMort_dynamic+additionalMortality_dynamic),tp_fullExp); // mortality of the previous diameter class
1843  //double debug1 = gfd("mortCoef",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]));
1844  //double debug2 = debug1*mortCoef_multiplier_dynamic+pathMort_dynamic;
1845  //double debug3 = min(1.0,debug2);
1846  //double debug4 = 1.0-debug3;
1847  //double debug5 = pow(debug4,tp_fullExp);
1848  //double debug6 = 1.0-debug5;
1849 
1850 
1851  beta_noExp = gfd("betaCoef",regId,ft,dc, DATA_NOW)*betaCoef_multiplier_now;
1852  double betaCoef_multiplier_dynamic = px->getMultiplier("betaCoef_multiplier",ft,thisYear+ceil(cumTp_u));
1853  beta_fullExp = gfd("betaCoef",regId,ft,dc, thisYear+ceil(cumTp_u))*betaCoef_multiplier_dynamic;
1854  mort_exp = mort_fullExp*expType+mort_noExp*(1-expType);
1855  beta_exp = beta_fullExp*expType+beta_noExp*(1-expType);
1856  vHa_u_exp = vHa_exp_temp[u-1]*beta_exp*(1-mort_exp); // BUG !!! mort is yearly value, not between diameter class. SOLVED 20121108
1857  }
1858  }
1859  vHa_exp_temp.push_back(vHa_u_exp);
1860  cumAlive_exp_u = max(0.,cumAlive_exp_temp[u-1]*(1-mort_exp));
1861  cumAlive_exp_temp.push_back(cumAlive_exp_u);
1862 
1863  //cout << "********" << endl;
1864  //cout << "dc;mort;cumAlive;cumAlive_exp "<< endl ;
1865  //cout << dClasses[u] << ";"<< mort << ";" << cumAlive_u << ";" << cumAlive_exp_u << endl;
1866 
1867  }
1868  // debug stuff on vHa
1869  //double vHa_new = gfd("vHa",regId,ft,dc,DATA_NOW);
1870  //double hv2fa_old = gfd("hv2fa",regId,ft,dc,DATA_NOW);
1871  //cout << "Reg|Ft|dc|vHa (new)|1/hv2fa (old): " << regId << " | " << ft;
1872  //cout << " | " << dc << " | " << vHa_new << " | " << 1/hv2fa_old << endl;
1873 
1874  } // end of each diam
1875  //double pixID = px->getID();
1876  //cout << thisYear << ";"<< regIds2[r2] << ";" << pixID << ";" << ft << ";" << cumTp_exp_temp[3] << ";" << vHa_exp_temp[3] << endl;
1877  px->cumTp.push_back(cumTp_temp);
1878  px->vHa.push_back(vHa_temp);
1879  px->cumAlive.push_back(cumAlive_temp);
1880  px->cumTp_exp.push_back(cumTp_exp_temp);
1881  px->vHa_exp.push_back(vHa_exp_temp);
1882  px->cumAlive_exp.push_back(cumAlive_exp_temp);
1883 
1884  //sumCumTP += cumTp_exp_temp[3];
1885  //sumVHa += vHa_exp_temp[3];
1886  //count ++;
1887 
1888 
1889  } // end of each ft
1890  //double debug = 0.0;
1891  } // end of each pixel
1892  } // end of each region
1894  //avg_sumCumTp = sumCumTP/ count;
1895  //avg_sumVHa = sumVHa / count;
1896  //cout << "Avg sumCumTp_35 and sumVha_35: " << avg_sumCumTp << " and " << avg_sumVHa << " (" << count << ")" << endl;
1897  //exit(0);
1898 }
1899 
void
// NOTE(review): the definition line is missing from this listing (doxygen link
// stripped). From the log message below this is the start-of-year pixel reset
// (presumably ModelCoreSpatial::resetPixelValues()) — confirm against the repo.
//
// Rolls every pixel's state over to a new simulation year: last year's volumes
// and areas are preserved in the *_l members via swap(), then the per-year
// vectors are cleared so the biological/market modules can refill them.
 msgOut(MSG_INFO, "Starting resetting pixel level values");
 for(uint r2= 0; r2<regIds2.size();r2++){
 int regId = regIds2[r2];
 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
 for (uint p=0;p<regPx.size();p++){
 Pixel* px = regPx[p];
 px->swap(VAR_VOL); // vol_l = vol (archive last year's volumes before clearing)
 px->swap(VAR_AREA); // area_l = area
 // 20121108 BUG! Solved: empty() (which just returns true if the vector is empty) had been used instead of clear() (which actually clears the vector)
 px->vol.clear(); // by ft,dc
 px->area = px->area_l; // ATTENTION, DIFFERENT FROM THE OTHERS. area is not cleared: it is seeded with the previous year's values as default
 /*px->area.clear(); // by ft,dc*/
 px->hArea.clear(); // harvested area, by ft, dc
 px->deltaArea.clear();
 //px->regArea.clear(); // by year, ft NO, this one is a map, it doesn't need to be changed
 px->hVol.clear(); // harvested volume, by ft, dc
 px->hVol_byPrd.clear(); // by ft, dc, pp
 //px->in.clear(); // by pp
 //px->hr.clear(); // by pp
 px->vReg.clear(); // regeneration volume, by ft
 px->expectedReturns.clear(); // by ft
 px->beta.clear();
 px->mort.clear();
 px->addMort.clear(); // fire mortality
 px->tp.clear();
 px->cumTp.clear();
 px->vHa.clear();
 px->cumTp_exp.clear();
 px->vHa_exp.clear();
 px->cumAlive.clear();
 px->cumAlive_exp.clear();
 px->vMort.clear();
 px->vMortAdd.clear(); // fire mortality
 //std::fill(rpx[j]->vMort.begin(), rpx[j]->vMort.end(), 0.0);

 }
 }
}
1940 
void
// NOTE(review): the definition line is missing from this listing (doxygen link
// stripped). From the log message below this is the per-pixel cache of
// spatially-explicit exogenous data (presumably
// ModelCoreSpatial::cachePixelExogenousData()) — confirm against the repo.
//
// For every pixel, forest type (ft) and diameter class (dc) it caches:
//  - tp       : years of passage to the NEXT diameter class
//  - beta     : growth coefficient
//  - mort     : natural + pathogen mortality rate (capped at 1)
//  - addMort  : additional (e.g. fire) mortality — volumes, not rates
//  - avalCoef : availability coefficient (1.0 when applyAvalCoef is off)

 msgOut(MSG_INFO, "Starting cashing on pixel spatial-level exogenous data");
 bool applyAvalCoef = MTHREAD->MD->getBoolSetting("applyAvalCoef");

 for(uint r2= 0; r2<regIds2.size();r2++){
 int regId = regIds2[r2];
 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
 for (uint p=0;p<regPx.size();p++){
 Pixel* px = regPx[p];
 // Clear previous year's caches before refilling (one inner vector per ft)
 px->tp.clear();
 px->beta.clear();
 px->mort.clear();
 px->addMort.clear(); // fire mortality
 px->avalCoef.clear();

 for(uint j=0;j<fTypes.size();j++){
 string ft = fTypes[j];
 vector <double> tp_byu;
 vector <double> beta_byu;
 vector <double> mort_byu;
 vector <double> addMort_byu; // fire mortality

 // Pixel-level multipliers for the current year (climate/scenario effects)
 double tp_multiplier_now = px->getMultiplier("tp_multiplier",ft,DATA_NOW);
 double mortCoef_multiplier_now = px->getMultiplier("mortCoef_multiplier",ft,DATA_NOW);

 double betaCoef_multiplier_now = px->getMultiplier("betaCoef_multiplier",ft,DATA_NOW);
 double avalCoef_ft = applyAvalCoef?px->getSTData("avalCoef",ft,DATA_NOW):1.0;

 if(avalCoef_ft == MTHREAD->MD->getDoubleSetting("noValue")){
 msgOut(MSG_CRITICAL_ERROR,"applyAvalCoef has benn asked for, but I can't find the avalCoef in the data!");
 }

 for (uint u=0; u<dClasses.size(); u++){
 string dc = dClasses[u];
 double pathMortality = px->getPathMortality(ft,dc,DATA_NOW);
 double additionalMortality_multiplier_reg_ft_dc = gfd("additionalMortality_multiplier",regId,ft,dc, DATA_NOW);
 double additionalMortality = px->getSTData("additionalMortality", ft, DATA_NOW, dc, 0.0)*additionalMortality_multiplier_reg_ft_dc; // these are volumes not rates !
 double tp, beta_real, mort_real;
 if (u==0){
 // tp of first diameter class not making it change across the time dimension, otherwise problems in getting the right past
 // regenerations. BUT good, px->tp.at(0) is used only to pick up the right regeneration, so the remaining of the model
 // uses the getMultiplier version and cumTp considers the dynamic effects also in the first dc.
 tp = gfd("tp",regId,ft,dClasses[u],firstYear)*px->getMultiplier("tp_multiplier",ft,firstYear); // tp is defined also in the first diameter class, as it is the years to reach the NEXT diameter class
 } else {
 tp = gfd("tp",regId,ft,dClasses[u],DATA_NOW)*tp_multiplier_now; // tp is defined also in the first diameter class, as it is the years to reach the NEXT diameter class
 }
 // First dc ("00") has no growth/mortality data: beta and mort default to 0
 beta_real = u?gfd("betaCoef",regId,ft,dClasses[u],DATA_NOW)*betaCoef_multiplier_now:0;
 mort_real = min(u?gfd("mortCoef",regId,ft,dClasses[u],DATA_NOW)*mortCoef_multiplier_now+pathMortality :0,1.0); // 20230428 bugfix Antonello, removed additionalMortality //Antonello, bug fixed 20160203: In any case, natural plus pathogen mortality can not be larger than 1!
 tp_byu.push_back(tp);
 beta_byu.push_back(beta_real);
 mort_byu.push_back(mort_real);
 addMort_byu.push_back(additionalMortality); // fire mortality
 } // end of each dc
 px->tp.push_back(tp_byu);
 px->beta.push_back(beta_byu);
 px->mort.push_back(mort_byu);
 px->addMort.push_back(addMort_byu); // fire mortality
 px->avalCoef.push_back(avalCoef_ft);
 } // end of each ft
 } // end of each pixel
 } // end of each region
}
2005 
void
// NOTE(review): the definition line is missing from this listing (doxygen link
// stripped). From the log message below this is the yearly inventory
// computation (presumably ModelCoreSpatial::computeInventary()) — confirm.
//
// For each level-2 region it computes, per primary product:
//  - "in"             : alive timber available this year (from last year's
//                       pixel volumes vol_l, scaled by avalCoef)
//  - "in_deathTimber" : usable death timber from the deathTimberInventory map
// and then, for every COMBINATION of primary products (2^nPriProducts), the
// union inventory bound used by the optimisation (REG->inResByAnyCombination).
 msgOut(MSG_INFO, "Starting computing inventary available for this year..");
 int nbounds = pow(2,priProducts.size()); // NOTE(review): float pow() truncated to int; (1 << priProducts.size()) would be exact — confirm before changing
 vector<vector<int>> concernedPriProductsTotal = MTHREAD->MD->createCombinationsVector(priProducts.size());
 int currentYear = MTHREAD->SCD->getYear();

 for(uint i=0;i<regIds2.size();i++){
 int r2 = regIds2[i];
 ModelRegion* REG = MTHREAD->MD->getRegion(r2);
 //Gis* GIS = MTHREAD->GIS;
 regPx = REG->getMyPixels();
 vector <double> in_reg(priProducts.size(),0.); // alive-timber inventory by primary product, zero-initialised
 vector <double> in_deathTimber_reg(priProducts.size(),0.); // death-timber inventory by primary product, zero-initialised
 for (uint p=0;p<regPx.size();p++){
 Pixel* px = regPx[p];
 //int debugPx = px->getID();
 //int debug2 = debugPx;
 //px->in.clear();
 for(uint pp=0;pp<priProducts.size();pp++){
 double in = 0;
 for(uint ft=0;ft<fTypes.size();ft++){
 for(uint dc=0;dc<dClasses.size();dc++){
 // app() gives the (ft,dc) -> primary product allocation share
 in += app(priProducts[pp],fTypes[ft],dClasses[dc])*px->vol_l.at(ft).at(dc)*px->avalCoef.at(ft);
 }
 }
 //px->in.push_back(in);
 in_reg.at(pp) += in;
 } // end of each priProduct
 } // end each pixel


 for(uint pp=0;pp<priProducts.size();pp++){
 vector<string> priProducts_vector;
 priProducts_vector.push_back(priProducts[pp]);

 double in_deathMortality = MD->getAvailableDeathTimber(priProducts_vector,r2,currentYear-1);
 in_deathTimber_reg.at(pp) += in_deathMortality;

 // Even if I fixed all the lower bounds to zero in Opt::get_bounds_info still the model
 // doesn't solve with no-forest in a region.
 // Even with 0.0001 doesn't solve !!
 // With 0.001 some scenarios doesn't solve in 2093
 // With 0.003 vRegFixed doesn't solve in 2096
 // Tried with 0.2 but no changes, so put it back on 0.003
 //spd(max(0.001,in_reg.at(pp)),"in",r2,priProducts[pp],DATA_NOW,true);
 spd(in_reg.at(pp),"in",r2,priProducts[pp],DATA_NOW,true);
 spd(in_deathTimber_reg.at(pp),"in_deathTimber",r2,priProducts[pp],DATA_NOW,true);
 #ifdef QT_DEBUG
 if (in_reg.at(pp) < -0.0){
 msgOut(MSG_CRITICAL_ERROR,"Negative inventory");
 }
 #endif
 }

 // ##### Now creating a set of bounds for the optimisation that account of the fact that the same ft,dc can be used for multiple products:

 // 20160928: Solved a big bug: for each combination instead of taking the UNION of the various priProduct inventory sets I was taking the sum
 // Now both the alive and the death timber are made from the union
 // 20150116: As the same (ft,dc) can be used in more than one product knowing -and bounding the supply in the optimisation- each single
 // in(pp) is NOT enough.
 // We need to bound the supply for each possible combination, that is for 2^(number of prim.pr)
 // Here we compute the detailed inventory. TO.DO: Create the bounds in Opt. done
 // 20160209: Rewritten and corrected a bug that was not giving enough inv to multiproduct combinations
 for (uint i=0; i<nbounds; i++){
 vector<int> concernedPriProducts = concernedPriProductsTotal[i];
 vector<string> concernedPriProducts_ids = positionsToContent(priProducts,concernedPriProducts);
 //double debug = 0.0;
 //for(uint z=0;z<concernedPriProducts.size();z++){
 // debug += gpd("in",r2,priProducts[concernedPriProducts[z]]); // to.do: this will need to be rewritten checked!
 //}
 double bound_alive = MD->getAvailableAliveTimber(concernedPriProducts_ids,r2); // From px->vol_l, as in "in"
 double bound_deathTimber = MD->getAvailableDeathTimber(concernedPriProducts_ids,r2,currentYear-1); // From deathTimberInventory map
 double bound_total = bound_alive + bound_deathTimber;

 REG->inResByAnyCombination[i] = bound_total;
 //REG->inResByAnyCombination_deathTimber[i] = bound_deathTimber;
 } // end for each bond
 } // end each region
}
2086 
void
// NOTE(review): the definition line is missing from this listing (doxygen link
// stripped). From the log message below this is the GIS layer update of forest
// areas (presumably ModelCoreSpatial::updateMapAreas()) — confirm.
//
// Writes the per-pixel forest area by forest type back into the
// "forArea_<ft>" GIS layers. Model areas are in hectares, the layers store
// m^2, hence the *10000 / /10000 conversions throughout.
// Two modes, chosen by oldVol2AreaMethod:
//  - new method: area is simply the sum of the pixel's area vector
//  - old method: area is rolled forward as oldValue - harvestedArea + regArea
 msgOut(MSG_INFO, "Updating map areas..");

 if (!oldVol2AreaMethod){
 if(!MD->getBoolSetting("usePixelData")) return;
 for(uint i=0;i<regIds2.size();i++){
 ModelRegion* reg = MD->getRegion(regIds2[i]);
 vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
 for (uint p=0;p<rpx.size();p++){
 Pixel* px = rpx[p];
 double pxid= px->getID();
 for(uint j=0;j<fTypes.size();j++){
 string ft = fTypes[j];
 double forArea = vSum(px->area.at(j)); // total area of this ft over all diameter classes (ha)
 #ifdef QT_DEBUG
 if(forArea < 0.0 ){
 msgOut(MSG_CRITICAL_ERROR, "Negative forArea in updateMapAreas");
 }
 #endif
 px->changeValue("forArea_"+ft, forArea*10000); // ha -> m^2
 } // end ft
 } // end px
 } // end region
 } else {
 int currentYear = MTHREAD->SCD->getYear();
 map<int,double> forestArea; // forest area by each region
 pair<int,double > forestAreaPair;
 vector<int> l2Regions = MTHREAD->MD->getRegionIds(2, true);
 vector <string> fTypes = MTHREAD->MD->getForTypeIds();
 int nFTypes = fTypes.size();
 int nL2Regions = l2Regions.size();
 for(int i=0;i<nL2Regions;i++){
 int regId = l2Regions[i];
 vector<Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regId);
 for(int j=0;j<nFTypes;j++){
 string ft = fTypes[j];
 //double regForArea = reg->getValue("forArea_"+ft);
 //double harvestedArea = gfd("harvestedArea",regId,ft,DIAM_ALL);
 //double regArea = gfd("regArea",regId,ft,DIAM_ALL);
 //cout << "Regid/ft/area/harvested/regeneration: " <<regId<<";"<<ft<<";"<<regForArea<<";"<<harvestedArea<<";" <<regArea<<endl;
 //double newAreaNet = regArea-harvestedArea;
 //double newAreaRatio = newAreaNet / regForArea;
 for(uint z=0;z<rpx.size();z++){
 Pixel* px = rpx[z];
 double oldValue = px->getDoubleValue("forArea_"+ft,true)/10000; // m^2 -> ha
 double hArea = vSum(px->hArea.at(j)); //bug 20140205 areas in the model are in ha, in the layer in m^2
 double regArea = findMap(px->regArea,currentYear).at(j); //bug 20140205 areas in the model are in ha, in the layer in m^2
 //double newValue = oldValue*(1. + newAreaRatio);
 double newValue = oldValue-hArea+regArea; // roll the area forward: remove harvested, add regenerated
 double areaNetOfRegeneration = oldValue-hArea;
 #ifdef QT_DEBUG
 if (areaNetOfRegeneration<0.0){
 msgOut(MSG_CRITICAL_ERROR,"areaNetOfRegeneration negative in updateMapAreas");
 }
 if (newValue<0.0){
 msgOut(MSG_CRITICAL_ERROR,"for area negative in updateMapAreas");
 }
 #endif
 rpx[z]->changeValue("forArea_"+ft, newValue*10000); // ha -> m^2
 }
 }
 }
 }
}
2152 
void
// NOTE(review): the definition line is missing from this listing (doxygen link
// stripped). From the body this updates the per-forest-type "vol_<ft>" and
// "expectedReturns_<ft>" GIS layers and refreshes the corresponding GUI images
// (presumably ModelCoreSpatial::updateOtherMapData()) — confirm.
vector<int> l2Regions = MTHREAD->MD->getRegionIds(2, true);
vector <string> fTypes = MTHREAD->MD->getForTypeIds();
int nFTypes = fTypes.size();
int nL2Regions = l2Regions.size();
for(int i=0;i<nL2Regions;i++){
 int regId = l2Regions[i];
 vector<Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regId);
 for(int j=0;j<nFTypes;j++){
 string ft = fTypes[j];
 for(uint z=0;z<rpx.size();z++){
 Pixel* px = rpx[z];
 double vol = vSum(px->vol.at(j)); // total volume of this ft over all diameter classes
 double expectedReturns = px->expectedReturns.at(j);
 // Layers are optional: only write where they have been defined
 if(MTHREAD->GIS->layerExist("vol_"+ft)){
 rpx[z]->changeValue("vol_"+ft, vol);
 }
 if(MTHREAD->GIS->layerExist("expectedReturns_"+ft)){
 rpx[z]->changeValue("expectedReturns_"+ft, expectedReturns);
 }
 }
 }
}

// update GUI image..
for(int j=0;j<nFTypes;j++){
 string ft = fTypes[j];
 MTHREAD->GIS->updateImage("vol_"+ft);
 MTHREAD->GIS->updateImage("expectedReturns_"+ft);
}


}
2188 
2189 
void
// NOTE(review): the definition line is missing from this listing (doxygen link
// stripped). From the log message below this is the pixel->region aggregation
// (presumably ModelCoreSpatial::sumRegionalForData()) — confirm.
//
// Aggregates pixel-level results into regional forest data (vol, hV, hArea,
// vMort, vMortAdd, hr, productivities, vReg, regArea, forArea), optionally
// collects detailed harvested volumes by primary product, then computes
// area-weighted expected returns (by forest type, by parent forest type and
// overall) and, if the pathogen module is active, the share of invaded area.

 msgOut(MSG_INFO, "Summing data pixels->region..");
 //vector <string> outForVariables = MTHREAD->MD->getStringVectorSetting("outForVariables");
 int currentYear = MTHREAD->SCD->getYear();

 //map<vector<string>,double> hVol_byPrd; // by regid, ft, dc, pp
 map<tr1::array<string, 4>,double> hVol_byPrd; // by regid, ft, dc, pp

 // OLD CODE TO
 for(uint r2= 0; r2<regIds2.size();r2++){
 int regId = regIds2[r2];
 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels(); // by regId, ft, dc, pp

 // vector < vector <vector <double> > >hVol_byPrd; // by ft, dc, pp

 for(uint j=0;j<fTypes.size();j++){
 string ft = fTypes[j];

 // Regional accumulators for this forest type
 double regArea = 0.;
 double sumAreaByFt = 0.;
 double pxForAreaByFt = 0.;
 double vReg = 0.;
 double sumSumHProductivity = 0.;
 double sumHArea = 0.;

 for (uint u=0; u<dClasses.size(); u++){
 string dc = dClasses[u];
 double vol =0.;
 double hV = 0.;
 double hArea = 0.;
 double vMort = 0.;
 double vMortAdd = 0; // fire mortality
 double sumHProductivity = 0.; // productivity * area
 for (uint pi=0;pi<regPx.size();pi++){
 Pixel* px = regPx[pi];
 vol += px->vol.at(j).at(u);
 hV += px->hVol.at(j).at(u);
 hArea += px->hArea.at(j).at(u);
 vMort += px->vMort.at(j).at(u);
 vMortAdd += px->vMortAdd.at(j).at(u); // fire mortality
 sumHProductivity += (px->hProductivity.at(j).at(u) * px->hArea.at(j).at(u));
 if(MD->getBoolSetting("outDetailedHv",DATA_NOW)){
 for(uint pp=0;pp<priProducts.size();pp++){
 string pr = priProducts[pp];
 // vector <string> hVKey = {i2s(regId), ft, dc, pr};
 tr1::array<string, 4> hVKey = {i2s(regId), ft, dc, pr};
 //double debug = px->hVol_byPrd.at(j).at(u).at(pp);
 //cout << px->getID() << '\t' << j << '\t' << u << '\t' << pp << '\t' << debug << endl;
 incrOrAddMapValue(hVol_byPrd, hVKey, px->hVol_byPrd.at(j).at(u).at(pp));
 }
 }
 }
 if(u){ // skip the first diameter class ("00"): no regional output for it
 sfd(vol,"vol",regId,ft,dc,DATA_NOW);
 sfd(hV,"hV",regId,ft,dc,DATA_NOW,true);
 double hProductivityByDc = hArea ? sumHProductivity/hArea : 0.; // area-weighted average, guarded for zero harvested area
 sumSumHProductivity += (hProductivityByDc*hArea);
 sumHArea += hArea;
 sfd(hProductivityByDc,"hProductivityByDc",regId,ft,dc,DATA_NOW,true);
 sfd(hArea,"harvestedArea",regId,ft,dc,DATA_NOW, true);
 sfd(vMort,"vMort",regId,ft,dc,DATA_NOW,true);
 sfd(vMortAdd,"vMortAdd",regId,ft,dc,DATA_NOW,true); // fire mortality
 double vol_1 = gfd("vol",regId,ft,dc,currentYear-1);
 if(vol_1){
 sfd(hV/vol_1,"hr",regId,ft,dc,DATA_NOW, true); // harvesting rate = harvested volume over last year's stock
 } else {
 sfd(0.,"hr",regId,ft,dc,DATA_NOW, true);
 }

 }
 } // end foreach dc
 double hProductivity = sumHArea? sumSumHProductivity / sumHArea : 0.;
 sfd(hProductivity,"hProductivity",regId,ft,"",DATA_NOW,true);

 for (uint p=0;p<regPx.size();p++){
 Pixel* px = regPx[p];
 vReg += px->vReg.at(j);
 regArea += findMap(px->regArea,currentYear).at(j);
 pxForAreaByFt = (px->getDoubleValue("forArea_"+ft,true)/10000); // layer stores m^2, model uses ha

 sumAreaByFt += pxForAreaByFt;
 //double debug1 = sumAreaByFt;
 if(! (sumAreaByFt >= 0.0) ){ // written this way to also catch NaN
 msgOut(MSG_CRITICAL_ERROR,"sumAreaByFt is not non negative.");
 }
 }
 sfd(vReg,"vReg",regId,ft,"",DATA_NOW, true);
 sfd(regArea,"regArea",regId,ft,"",DATA_NOW, true);
 sfd(sumAreaByFt,"forArea",regId,ft,"",DATA_NOW, true);
 } // end of for each ft

 for (uint p=0;p<regPx.size();p++){
 Pixel* px = regPx[p];
 double totPxForArea = vSum(px->area);

#ifdef QT_DEBUG
 // Cross-check: the pixel's area vector must agree with the GIS layers
 double totPxForArea_debug = 0.0;
 for(uint j=0;j<fTypes.size();j++){
 string ft = fTypes[j];
 totPxForArea_debug += (px->getDoubleValue("forArea_"+ft,true)/10000);
 }

 if ( (totPxForArea - totPxForArea_debug) > 0.0001 || (totPxForArea - totPxForArea_debug) < -0.0001 ){
 cout << "*** ERROR: area discrepance in pixel " << px->getID() << " of " << (totPxForArea - totPxForArea_debug) << " ha!" << endl;
 msgOut(MSG_CRITICAL_ERROR,"Total forest area in pixel do not coincide if token from layer forArea or (pixel) vector area!");
 }
#endif
 } // end of each pixel


 } // end each region
 //cout << "done" << endl;
 MTHREAD->DO->printDetailedHV(hVol_byPrd);


 // Taking care of expected returns here..
 // (Changed 25/08/2016 afternoon: expRet{ft,r} are now sum{px}{expRet{ft,px}*fArea_{px}}/fArea{r} and no longer sum{px}{expRet{ft,px}*fArea_{px,ft}}/fArea{r,ft} )
 // Also now we report the expReturns by group and by forest, each of which is made only with the best ones within their group

 vector<string> parentFtypes = MTHREAD->MD->getForTypeParents();

 for(uint r2= 0; r2<regIds2.size();r2++){
 int regId = regIds2[r2];
 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
 double totRegionForArea = 0.;
 double totSumExpRet = 0.;
 vector <double> totSumExpRet_byFTParent(parentFtypes.size(),0.0);
 vector <double> totSumExpRet_byFTypes(fTypes.size(),0.0);

 // First computing the sumExpectedReturns..
 for (uint p=0;p<regPx.size();p++){
 Pixel* px = regPx[p];
 //int debug_pxid = px->getID();
 double pxForArea = vSum(px->area);
 totRegionForArea += pxForArea;
 double bestPxExpectedRet = getMax(px->expectedReturnsNotCorrByRa);
 for(uint i=0;i<parentFtypes.size();i++){
 vector <string> childIds = MTHREAD->MD->getForTypeChilds(parentFtypes[i]);
 vector <int> childPos = MTHREAD->MD->getForTypeChilds_pos(parentFtypes[i]);
 vector<double> pxExpReturnsByChilds(childPos.size(),0.0);
 for(uint j=0;j<childPos.size();j++){
 double pxExpReturn_singleFt = px->expectedReturns.at(childPos[j]);
 // Manual fix to not have the expected returns of ash within the general "broadL" expected returns.
 // To do: remove it after we work on the ash project.. I don't like manual fixes !!!
 pxExpReturnsByChilds.at(j) = (childIds.at(j) == "ash") ? 0.0 : pxExpReturn_singleFt;
 //pxExpReturnsByChilds.at(j) = pxExpReturn_singleFt;
 totSumExpRet_byFTypes.at(childPos[j]) += pxExpReturn_singleFt*pxForArea;
 } // end of each ft
 totSumExpRet_byFTParent[i] += getMax(pxExpReturnsByChilds)*pxForArea; // parent group takes the best child in each pixel
 } // end for each partentFt
 totSumExpRet += bestPxExpectedRet * pxForArea;
 } // end for each px

 // ..and now computing the expReturns and storing them
 for(uint i=0;i<parentFtypes.size();i++){
 vector <int> childPos = MTHREAD->MD->getForTypeChilds_pos(parentFtypes[i]);
 for(uint j=0;j<childPos.size();j++){
 //double debug1 = totSumExpRet_byFTypes.at(childPos[j])/totRegionForArea;
 sfd(totSumExpRet_byFTypes.at(childPos[j]),"sumExpReturns",regId,fTypes.at(childPos[j]),"",DATA_NOW, true);
 sfd(totSumExpRet_byFTypes.at(childPos[j])/totRegionForArea,"expReturns",regId,fTypes.at(childPos[j]),"",DATA_NOW, true);
 } // end of each ft
 //double debug2 = totSumExpRet_byFTParent.at(i)/totRegionForArea;
 sfd(totSumExpRet_byFTParent.at(i),"sumExpReturns",regId,parentFtypes[i],"",DATA_NOW, true);
 sfd(totSumExpRet_byFTParent.at(i)/totRegionForArea,"expReturns",regId,parentFtypes[i],"",DATA_NOW, true);

 } // end for each partentFt
 //double debug3 = totSumExpRet/totRegionForArea;
 sfd(totSumExpRet,"sumExpReturns",regId,"","",DATA_NOW, true);
 sfd(totSumExpRet/totRegionForArea,"expReturns",regId,"","",DATA_NOW, true);

 } // end for each region

 // Computing pathogens share of forest invasion
 if(MD->getBoolSetting("usePathogenModule")){
 for(uint r2= 0; r2<regIds2.size();r2++){
 int regId = regIds2[r2];
 regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
 double totalForArea = 0.0;
 double invadedArea = 0.0;
 for (uint p=0;p<regPx.size();p++){
 Pixel* px = regPx[p];
 int invaded = 0.0; // flag: 1 if any (ft,dc) in this pixel suffers pathogen mortality
 for(uint j=0;j<fTypes.size();j++){
 for (uint u=0; u<dClasses.size(); u++){
 if(px->getPathMortality(fTypes[j],dClasses[u]) > 0){
 invaded = 1.0;
 }
 }
 }
 totalForArea += vSum(px->area);
 invadedArea += vSum(px->area)*invaded;
 }
 sfd(invadedArea/totalForArea,"totalShareInvadedArea",regId,"","",DATA_NOW, true);
 }
 } // end we are using path model




}
/**
 * This function calls registerHarvesting() (accounts for emissions from forest operations), registerDeathBiomass() (registers new stocks of death biomass),
 * registerProducts() (registers new stock of products) and registerTransports() (accounts for emissions from transportation).
 *
 * It passes to registerProducts():
 * - for primary products, the primary products exported out of the country, but not those exported to other regions or used in the region, as
 * these are assumed to be totally transformed to secondary products;
 * - for secondary products, those produced in the region from locally or regionally imported primary products plus those secondary products
 * imported from other regions, less those exported to other regions. It doesn't include the secondary products imported from abroad the country.
 */
void
// NOTE(review): the definition line is missing from this listing (doxygen link
// stripped). From the doxygen comment above this is the carbon-balance event
// registration (presumably ModelCoreSpatial::registerCarbonEvents()) — confirm.

 //void registerHarvesting(const int & regId, const string & fType, const double & value); ///< register the harvesting of trees -> cumEmittedForOper
 //void registerDeathBiomass(const double &value, const int & regId, const string &fType);
 //void registerProducts(const double &value, const int & regId, const string &productName);
 //void registerTransports(const double &distQ, const int & regId);

 for(uint i=0;i<regIds2.size();i++){
 for(uint j=0;j<fTypes.size();j++){
 // Natural plus additional (e.g. fire) mortality volumes feed the death-biomass pool
 double deathBiomass = gfd("vMort",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW)+ gfd("vMortAdd",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
 double harvesting = gfd("hV",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
 MTHREAD->CBAL->registerDeathBiomass(deathBiomass, regIds2[i], fTypes[j]); // register new stock
 MTHREAD->CBAL->registerHarvesting(harvesting, regIds2[i], fTypes[j]); // account for emissions. Added 201500715: it also moves the extra biomass to the death biomass pool
 }

 for(uint p=0;p<priProducts.size();p++){
 // for the primary products we consider only the exports as the domestic consumption is entirely transformed in secondary products
 double int_exports = gpd("sa",regIds2[i],priProducts[p],DATA_NOW);
 MTHREAD->CBAL->registerProducts(int_exports, regIds2[i], priProducts[p]); // register new stock
 }
 for(uint p=0;p<secProducts.size();p++){
 // for the transformed product we skip those that are imported, hence derived from other forest systems
 // but we consider those coming from other regions
 double consumption = gpd("dl",regIds2[i],secProducts[p],DATA_NOW); // dl = sl + net regional imports
 MTHREAD->CBAL->registerProducts(consumption, regIds2[i], secProducts[p]); // register new stock
 }

 }
 // Transport emissions: distance * transported quantity, summed over every
 // (origin, destination) pair of level-2 regions and every product
 for (uint r1=0;r1<l2r.size();r1++){
 for (uint r2=0;r2<l2r[r1].size();r2++){
 int rfrom= l2r[r1][r2];
 double distQProd = 0.0;
 for (uint r3=0;r3<l2r[r1].size();r3++){
 int rto = l2r[r1][r3];
 double dist = gpd("dist",rfrom,"",DATA_NOW,i2s(rto)); //km
 for(uint p=0;p<allProducts.size();p++){
 distQProd += dist*gpd("rt",rfrom,allProducts[p],DATA_NOW,i2s(rto)); //km*Mm^3
 }
 }
 MTHREAD->CBAL->registerTransports(distQProd, rfrom);
 }
 }
 MTHREAD->CBAL->HWP_eol2energy(); // used to compute the energy substitution from hwp that reach the end of life and doesn't go to landfill. Previously the energy substitution was computed in registerProducts(), that is at the time when the product was produced.

}
2448 
2449 /**
2450  * Compute the expectation weighted price based on the ratio of the international (world) price between the future and now.
2451  *
2452  * @param curLocPrice The local current price
2453  * @param worldCurPrice The world current price
2454  * @param worldFutPrice The world future price
2455  * @param sl Supply local
2456  * @param sa Supply abroad
2457  * @param expCoef The expectation coefficient for prices for the agent [0,1]
2458  * @return The expType-averaged local (or weighter) price
2459  */
2460 double
2461 ModelCoreSpatial::computeExpectedPrice(const double & curLocPrice, const double & worldCurPrice, const double & worldFutPrice, const double & sl, const double & sa, const double & expCoef){
2462  double fullExpWPrice = (curLocPrice*(worldFutPrice/worldCurPrice)*sl+worldFutPrice*sa)/(sa+sl);
2463  double curWPrice = (curLocPrice*sl+worldCurPrice*sa)/(sl+sa);
2464  return curWPrice * (1-expCoef) + fullExpWPrice * expCoef;
2465 }
2466 
/**
 * It uses volumes from GIS data to "move" volumes from one forest type to another (when called with what="vol"). It then moves areas
 * proportionally and, as dc0 volumes are not defined but areas are, computes, again proportionally, the areas in the destination forest types for dc=0.
 * It acts on the pix->vol, pix->area and pix->area_l vectors. It also creates/updates the px->values layer map for the area, but it doesn't cache the
 * results in forDataMap.
 *
 * It is called first with parameter what="vol" in initializePixelVolumes() and then with what="area" in initializePixelAreas().
 * As we need the original volumes in the area allocation, original_vols is kept as a static variable.
 *
 */
2477 void
2479  if(!MD->getBoolSetting("useSpExplicitForestTypes")) return;
2480 
2481  int nFTypes = fTypes.size();
2482  int nDC = dClasses.size();
2483  int pxC = 0;
2484 
2485  for(uint ir=0;ir<regIds2.size();ir++){
2486  int r2 = regIds2[ir];
2487  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2488  regPx = REG->getMyPixels();
2489  pxC += regPx.size();
2490  }
2491 
2492  static vector<vector<vector<double>>> original_vols(pxC, vector<vector<double>>(nFTypes, vector<double>(nDC, 0.0))); // by px counter, ftype, dc
2493 
2494  if(what=="vol"){
2495  // first, before transfering volumes, saving the original ones..
2496  for(uint i=0;i<fTypes.size();i++){
2497  for (uint u=0; u<dClasses.size(); u++){
2498  int pxC_loc = 0;
2499  for(uint ir=0;ir<regIds2.size();ir++){
2500  int r2 = regIds2[ir];
2501  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2502  regPx = REG->getMyPixels();
2503  for (uint p=0;p<regPx.size();p++){
2504  Pixel* px = regPx[p];
2505  original_vols[pxC_loc][i][u] += px->vol[i][u];
2506  pxC_loc ++;
2507  }
2508  }
2509  }
2510  }
2511  for(uint i=0;i<fTypes.size();i++){
2512  string fti = fTypes[i];
2513  for(uint o=0;o<fTypes.size();o++){
2514  string fto = fTypes[o];
2515  for (uint u=1; u<dClasses.size(); u++){ // first diameter class volumes are computed from the model..
2516  string layerName = "spInput#vol#"+fto+"#"+fti+"#"+i2s(u);
2517  if (MTHREAD->GIS->layerExist(layerName)){
2518  for(uint ir=0;ir<regIds2.size();ir++){
2519  int r2 = regIds2[ir];
2520  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2521  regPx = REG->getMyPixels();
2522  for (uint p=0;p<regPx.size();p++){
2523  Pixel* px = regPx[p];
2524  double vol_transfer = min(px->getDoubleValue(layerName,true)/1000000,px->vol[i][u]) ; // Vol in the layer are in m^3, in the model in Mm^3
2525  px->vol[i][u] -= vol_transfer;
2526  px->vol[o][u] += vol_transfer;
2527  }
2528  }
2529  }
2530  }
2531  }
2532  }
2533  }
2534 
2535  if(what=="area"){
2536  /**
2537  Allocate area proportionally to volumes (see file test_proportional_computation_of_areas_from_volumes.ods)
2538  Example:
2539  FtIn FtOut Vtrasfer
2540  con ash 0.2
2541  brHf ash 0.1
2542  brCopp ash 0.3
2543  con oak 0.3
2544  brHf oak 0.2
2545  brCopp oak 0.1
2546 
2547  Vorig Aorig Vnew Anew
2548  con 10 30 9.5 28.5 Aorig-Aorig*(Vtrasfer1/Vorig)-Aorig(Vtrasfer2/Vorig)
2549  brHf 5 20 4.7 18.8
2550  brCopp 2 20 1.6 16
2551  ash 0 0 0.6 4 Aorig1*Vtrasfer1/(Vorig1)+Aorig2*Vtrasfer2/(Vorig2)+...
2552  oak 0 0 0.6 2.7
2553  70 70
2554  */
2555  // first, before transfering areas, saving the original ones (we already saved the vols in the what="vol" section, that is called before this one)..
2556  vector<vector<vector<double>>> original_areas(pxC, vector<vector<double>>(nFTypes, vector<double>(nDC, 0.0))); // by px counter, ftype, dc
2557  for(uint i=0;i<fTypes.size();i++){
2558  for (uint u=0; u<dClasses.size(); u++){
2559  int pxC_loc = 0;
2560  for(uint ir=0;ir<regIds2.size();ir++){
2561  int r2 = regIds2[ir];
2562  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2563  regPx = REG->getMyPixels();
2564  for (uint p=0;p<regPx.size();p++){
2565  Pixel* px = regPx[p];
2566  original_areas[pxC_loc][i][u] += px->area_l[i][u];
2567  pxC_loc ++;
2568  }
2569  }
2570  }
2571  }
2572 
2573 
2574  // transferred areas ordered by pxcounter, i and then o ftype. Used to then repart the 0 diameter class..
2575  vector<vector<vector<double>>> transferred_areas(pxC, vector<vector<double>>(nFTypes, vector<double>(nFTypes, 0.0))); // initialize a 3d vector of nFTypes zeros.
2576 
2577  for(uint i=0;i<fTypes.size();i++){
2578  string fti = fTypes[i];
2579  for(uint o=0;o<fTypes.size();o++){
2580  string fto = fTypes[o];
2581  for (uint u=1; u<dClasses.size(); u++){ // first diameter class area is comuted proportionally..
2582  string layerName = "spInput#vol#"+fto+"#"+fti+"#"+i2s(u);
2583  if (MTHREAD->GIS->layerExist(layerName)){
2584  int pxC_loc = 0;
2585  for(uint ir=0;ir<regIds2.size();ir++){
2586  int r2 = regIds2[ir];
2587  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2588  regPx = REG->getMyPixels();
2589  for (uint p=0;p<regPx.size();p++){
2590  Pixel* px = regPx[p];
2591  double vol_i_orig = original_vols[pxC_loc][i][u];
2592  double vol_transfer = vol_i_orig?px->getDoubleValue(layerName,true)/1000000:0.0; // Vol in the layer are in m^3, in the model in Mm^3
2593  double area_i_orig = original_areas[pxC_loc][i][u];
2594  double area_transfer = vol_i_orig?area_i_orig*vol_transfer/vol_i_orig:0.0;
2595  px->area_l[i][u] -= area_transfer;
2596  px->area[i][u] = px->area_l[i][u];
2597  px->area_l[o][u] += area_transfer;
2598  px->area[o][u] = px->area_l[o][u];
2599  transferred_areas[pxC_loc][i][o] += area_transfer;
2600  pxC_loc ++;
2601  }
2602  }
2603  }
2604  }
2605  }
2606  }
2607 
2608  // Moving the area in the 0 diameter class, for which no info is normally available..
2609  double pxC_loc = 0;
2610  for(uint ir=0;ir<regIds2.size();ir++){
2611  int r2 = regIds2[ir];
2612  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2613  regPx = REG->getMyPixels();
2614  for (uint p=0;p<regPx.size();p++){
2615  Pixel* px = regPx[p];
2616  for(uint i=0;i<fTypes.size();i++){
2617  for(uint o=0;o<fTypes.size();o++){
2618  double area_i_orig = 0.0;
2619  for (uint u=1; u<dClasses.size(); u++){ // we want to skip the 0 diameter class, this is why we don't simply use vSum()..
2620  area_i_orig += original_areas[pxC_loc][i][u];
2621  }
2622  double area_transfer_u0 = area_i_orig?original_areas[pxC_loc][i][0]*(transferred_areas[pxC_loc][i][o]/area_i_orig):0.0;
2623  px->area_l[i][0] -= area_transfer_u0 ;
2624  px->area[i][0] = px->area_l[i][0];
2625  px->area_l[o][0] += area_transfer_u0 ; // bug corrected 20151130: it was 0 instead of o (output) !!
2626  px->area[o][0] = px->area_l[0][0]; // bug corrected 20151130: it was 0 instead of o (output) !!
2627  }
2628  }
2629  pxC_loc++;
2630  }
2631  }
2632 
2633  // Alligning the area memorised in the px layers to the new areas of the ft..
2634  for(uint i=0;i<fTypes.size();i++){
2635  string fti_id = fTypes[i];
2636  forType* fti = MTHREAD->MD->getForType(fti_id);
2637  int ft_memType = fti->memType;
2638  string ft_layerName = fti->forLayer;
2639  //if(ft_memType==3){
2640  // MTHREAD->GIS->addLayer(ft_layerName,ft_layerName,false,true); //20151130: no needed as we already added it in applyForestReclassification (yes, odd, as memory type 3 layerrs do not have any reclassification rule associated, but if I don't add the layer at that time I got other errors)
2641  // }
2642  if(ft_memType==3 ||ft_memType==2){
2643  for(uint ir=0;ir<regIds2.size();ir++){
2644  int r2 = regIds2[ir];
2645  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2646  regPx = REG->getMyPixels();
2647  for (uint p=0;p<regPx.size();p++){
2648  Pixel* px = regPx[p];
2649  double area_px = vSum(px->area[i]);
2650  px->changeValue(ft_layerName,area_px*10000);
2651  }
2652  }
2653  }
2654  }
2655  } // end if what is area
2656 }
2657 
2658 void
2660  // Print debug stats on inventory and supplies in each region..
2661  cout << "Printing debug information on initial regional inventories and supplies.." << endl;
2662  cout << "Reg\tProduct\t\tInv\tSt\tSa\tSl" << endl;
2663  for(uint r1=0;r1<l2r.size();r1++){
2664  for(uint r2c=0;r2c<l2r[r1].size();r2c++){
2665  for(uint p=0;p<priProducts.size();p++){
2666  int r2 = l2r[r1][r2c];
2667  double inv = gpd("in",r2,priProducts[p],secondYear);
2668  double st = gpd("st",r2,priProducts[p],secondYear);
2669  double sl = gpd("sl",r2,priProducts[p],secondYear);
2670  double sa = gpd("sa",r2,priProducts[p],secondYear);
2671  cout << r2 << "\t" << priProducts[p] << "\t\t" << inv << "\t" << st << "\t" << sl << "\t" << sa << endl;
2672  }
2673  }
2674  } // end of r1 region
2675  exit(0);
2676 
2677 }
2678 
2679 /**
2680  * @brief ModelCoreSpatial::allocateHarvesting
2681  * @param total_st vector of total supply by primary products
2682  * @return a vector of the remaining supply that goes allocated to alive timber (that is, to harvesting)
2683  *
2684  * The algorithm loops over the deathTimberInventory map for each year (newer to older), dc (higher to smaller) and ft.
2685  * It computes the primary products allocable from that combination and allocates the cell amount to decrease the total_st of those products
2686  * in a proportional way to what still remain of the allocable products.
2687  *
2688  * It is called in the runMarketModule() function.
2689  *
2690  */
2691 
2692 vector <double>
2693 ModelCoreSpatial::allocateHarvesting(vector<double> total_st, const int &regId){
2694  if(!MD->getBoolSetting("useDeathTimber",DATA_NOW)) return total_st;
2695  //vector <double> st_fromHarv(priProducts.size(),0.);
2696  //map<iisskey, double > * deathTimberInventory= MD->getDeathTimberInventory();
2697  int maxYears = MD->getMaxYearUsableDeathTimber();
2698  int currentYear = MTHREAD->SCD->getYear();
2699  for(uint y = currentYear-1; y>currentYear-1-maxYears; y--){
2700  for (int u = dClasses.size()-1; u>=0; u--){ // I need to specify u as an integer !
2701  string dc = dClasses.at(u);
2702  for (uint f=0; f<fTypes.size(); f++){
2703  string ft = fTypes[f];
2704  vector<int>allocableProducts = MD->getAllocableProductIdsFromDeathTimber(regId, ft, dc, y, currentYear-1);
2705  iisskey key(y,regId,ft,dc);
2706  double deathTimber = MD->deathTimberInventory_get(key);
2707  double sum_total_st_allocable = 0;
2708  // Computing shares/weights or remaining st to allcate
2709  for(uint ap=0;ap<allocableProducts.size();ap++){
2710  sum_total_st_allocable += total_st.at(allocableProducts[ap]);
2711  }
2712  for(uint ap=0;ap<allocableProducts.size();ap++){
2713  double allocableShare = sum_total_st_allocable?total_st.at(allocableProducts[ap])/sum_total_st_allocable:0.0;
2714  double allocated = min(total_st[allocableProducts[ap]],deathTimber*allocableShare);
2715  MD->deathTimberInventory_incrOrAdd(key,-allocated);
2716  total_st[allocableProducts[ap]] -= allocated;
2717  }
2718  }
2719  }
2720  }
2721  return total_st;
2722 }
2723 
2724 
2725 /**
2726  * This function has two tasks:
2727  * 1) compute the policy balances (- -> public cost,subside; + -> public revenue, tax) for pol_mktDirInt_s, pol_mktDirInt_d, pol_trSub and pol_tcSub
2728  * (pol_fiSub are done directly in the runManagementModule() function)
2729  * 2) compute the producer/consumer surpluses
2730  */
void
ModelCoreSpatial::computeEconomicBalances(){
    //int thisYear = MTHREAD->SCD->getYear();
    int previousYear = MTHREAD->SCD->getYear()-1;

    for(uint r2=0; r2<regIds2.size();r2++){
        int regId = regIds2[r2];
        ModelRegion* reg = MD->getRegion(regId);

        // --- Primary products: supply-side policy balance and producer surplus ---
        for(uint p=0;p<priProducts.size();p++){
            double pol_mktDirInt_s = gpd("pol_mktDirInt_s",regId,priProducts[p]);              // current supply-side market intervention coefficient
            double pol_mktDirInt_s_1 = gpd("pol_mktDirInt_s",regId,priProducts[p],previousYear); // same, previous year
            double sigma = gpd("sigma",regId,priProducts[p]);                                  // supply elasticity
            double in = gpd("in",regId,priProducts[p]);                                        // inventory, current year
            double in_1 = gpd("in",regId,priProducts[p], previousYear);                        // inventory, previous year
            double pc = gpd("pc",regId,priProducts[p]);                                        // price, current year
            double pc_1 = gpd("pc",regId,priProducts[p], previousYear);
            double sc = gpd("sc",regId,priProducts[p]);                                        // supplied quantity, current year
            double sc_1 = gpd("sc",regId,priProducts[p],previousYear);
            double gamma_incr = gpd("gamma_incr",regId,priProducts[p]); // elast supply to increasing stocks
            double gamma_decr = gpd("gamma_decr",regId,priProducts[p]); // elast supply to decreasing stocks

            // Pick the stock elasticity according to whether the inventory grew or shrank..
            double gamma = in>in_1 ? gamma_incr: gamma_decr;
            // Public balance of the supply intervention (negative -> public cost/subsidy,
            // positive -> public revenue/tax, see function header)..
            double polBal_mktDirInt_s = pc * (1-pol_mktDirInt_s) * sc;
            // Producer surplus: current revenue minus the integral of the inverse
            // supply curve (built from last year's price/quantity, the intervention
            // coefficients and the inventory-adjustment term)..
            double surplus_prod = pc * sc -
                        pc_1
                        * pol_mktDirInt_s_1
                        * pow(pol_mktDirInt_s,-1)
                        * pow(sc_1,-1/sigma)
                        * pow(in_1/in,gamma/sigma)
                        * (sigma/(sigma+1))
                        * pow(sc,(sigma+1)/sigma);

            spd(polBal_mktDirInt_s,"polBal_mktDirInt_s",regId,priProducts[p],DATA_NOW,true); // M€
            spd(surplus_prod,"surplus_prod",regId,priProducts[p],DATA_NOW,true);
        }

        // --- Secondary products: demand-side interventions, transformation subsidies
        //     and consumer surplus ---
        for(uint p=0;p<secProducts.size();p++){
            double pol_mktDirInt_d = gpd("pol_mktDirInt_d",regId,secProducts[p]);              // current demand-side intervention coefficient
            double pol_mktDirInt_d_1 = gpd("pol_mktDirInt_d",regId,secProducts[p],previousYear);
            double sigma = gpd("sigma",regId,secProducts[p]);                                  // demand elasticity
            double pol_trSub = gpd("pol_trSub",regId,secProducts[p]);                          // transformation subsidy coefficient
            double pc = gpd("pc",regId,secProducts[p]);
            double pc_1 = gpd("pc",regId,secProducts[p],previousYear);
            double dc = gpd("dc",regId,secProducts[p]);                                        // demanded quantity, current year
            double dc_1 = gpd("dc",regId,secProducts[p],previousYear);
            double dc0 = gpd("dc",regId,secProducts[p],secondYear);                            // demanded quantity at the base (second) year
            double m = gpd("m",regId,secProducts[p]);                                          // transformation margin/cost
            double sl = gpd("sl",regId,secProducts[p]); // sl because dl include also the production from byproduct that has not subsides in the model

            double polBal_mktDirInt_d = pc * (pol_mktDirInt_d - 1) * dc;
            double polBal_trSub = m * (pol_trSub-1) * sl;
            // Lower integration bound of the demand integral, as a share of the base-year
            // demand. NOTE(review): with trs=0.5 only 50 percent of the demand integral is
            // excluded; the original "0.1 -> 90 percent" note refers to a previous value — confirm.
            double trs = 0.5; // 0.1: we consider 90 percent of the demand integral
            // Consumer surplus: integral of the inverse demand curve between trs*dc0 and dc,
            // minus the actual expenditure over the same quantity range..
            double surplus_cons = pc_1
                        * pol_mktDirInt_d_1
                        * pow(dc_1,-1/sigma)
                        * pow(pol_mktDirInt_d,-1)
                        * (sigma/(sigma+1))
                        * (pow(dc,(sigma+1)/sigma) - pow(trs*dc0,(sigma+1)/sigma))
                        - (dc - trs * dc0) * pc ;

            spd(polBal_mktDirInt_d,"polBal_mktDirInt_d",regId,secProducts[p],DATA_NOW,true); // M€
            spd(polBal_trSub,"polBal_trSub",regId,secProducts[p],DATA_NOW,true); // M€
            spd(surplus_cons,"surplus_cons",regId,secProducts[p],DATA_NOW,true); // M€
        }

        // --- All products: transport-cost subsidies toward each sibling region ---
        for(uint p=0;p<allProducts.size();p++){
            double pol_tcSub = gpd("pol_tcSub",regId,allProducts[p]);
            double polBal_tcSub = 0;
            vector<ModelRegion*> siblings = reg->getSiblings();
            for(uint rTo=0;rTo<siblings.size();rTo++){
                int regId2 = siblings[rTo]->getRegId();
                double ct = gpd("ct",regId,allProducts[p],DATA_NOW,i2s(regId2)); // unit transport cost toward regId2
                double rt = gpd("rt",regId,allProducts[p],DATA_NOW,i2s(regId2)); // transported quantity toward regId2
                polBal_tcSub += ct*(pol_tcSub-1) * rt;
            }
            spd(polBal_tcSub,"polBal_tcSub",regId,allProducts[p],DATA_NOW,true); // M€
        }

    }

}
2812 
2813 /**
2814  * Return the average age using cumTp vector
2815  */
2816 double
2818  if(dc == dClasses.size()-1) { return px->cumTp[ft][dc] * 1.2;}
2819  else {return (px->cumTp[ft][dc]+px->cumTp[ft][dc+1])/2; }
2820 }
2821 
2822 
2823 
2824 
2825 // added for carbon project
2826 
2827 double
2828 ModelCoreSpatial::getVHaByYear(const Pixel* px, const int& ft, const int& year, const double& extraBiomass_ratio, const int& regId ) const {
2829  // This function computes and send back the expected volume/ha in a forest type after a specific amount of years
2830  // It uses expected volumes/ha at each diameter class, and makes a linear interpolation between the corresponding times of passage
2831  // Ancient version was a "stairs" function of time, now it is a succession of linear approximations
2832 
2833  double vHa = 0;
2834  int thisYear = MTHREAD->SCD->getYear();
2835 
2836  // This loop goes through all diameter classes to find the last one that was reached at year tested
2837  for (int u = 0; u<dClasses.size(); u++){
2838  // We retrieve the expected time of passage for the current diameter class
2839  // and we compare it to the year tested
2840  double cumTP = px->cumTp_exp[ft][u];
2841  if (year>=cumTP) {
2842  // Forest has already reached this class,
2843  // We do nothing and go to the next one
2844  }
2845  else {// Forest has not reached this class yet, we stop here
2846  // Assumption : Volume growth is a linear approximation between volume at previous DC and at current DC
2847  // There would be a problem if "year" was negative, but it should not happen since year starts at 0
2848  double cumTP_plus = px->cumTp_exp[ft][u];
2849  double cumTP_minus = px->cumTp_exp[ft][u-1];
2850  double volHa_plus = px->vHa_exp[ft][u];
2851  double volHa_minus = px->vHa_exp[ft][u-1];
2852  double slope = (volHa_plus - volHa_minus)/(cumTP_plus - cumTP_minus);
2853 
2854  vHa = (volHa_minus + (year-cumTP_minus)*slope)*(1+extraBiomass_ratio);
2855  return vHa;
2856  }
2857  }
2858  // if we arrive here, we have exited the loop
2859  // and the forest has reached the last diameter class
2860  int dc = dClasses.size()-1; // index of the last diameter class
2861  double mortCoeff = gfd("mort",regId,fTypes[ft],dClasses[dc],thisYear); // mortality rate of the last diameter class
2862  double cumTP_max = px-> cumTp_exp[ft][dc]; // cumulative TP to reach last diameter class
2863  double time_elapsed = year - cumTP_max; // time since the last class was reached
2864  double volHa_max = px->cumTp_exp[ft][dc]; // volume/ha when the last class was reached
2865 
2866  // Now we compute the volume/ha at the year tested
2867  // Assumption: trees don't grow anymore but slowly die according to mortality rate
2868  vHa = volHa_max*pow(1-mortCoeff,time_elapsed)*(1+extraBiomass_ratio);
2869  return vHa;
2870 } // end of function
2871 
2872 
2873 
2874 /** ANCIENT VERSION OF FUNCTION. VOLUME WAS A STAIRS FUNCTION OF TIME
2875  * Return the Volume/ha in a forest after a given number of year after planting, for a specific forest type
2876 
2877 double
2878 ModelCoreSpatial::getVHaByYear(const Pixel* px, const int& ft, const int& year, const double& extraBiomass_ratio) const {
2879  double vHa = 0;
2880  //int regId = px->getMyRegion()->getRegId();
2881  //double extraBiomass_ratio = gfd("extraBiomass_ratio",regId,fTypes[ft],"",DATA_NOW);
2882 
2883  for (int u = 0; u<dClasses.size(); u++){
2884  double cumTP_exp = px->cumTp_exp[ft][u];
2885  if (year>=cumTP_exp){
2886  vHa = px->vHa_exp[ft][u]*(1+extraBiomass_ratio);
2887  } else {
2888  return vHa;
2889  }
2890  }
2891  return vHa;
2892 }
2893 */
void updateImage(string layerName_h)
Add one layer to the system.
Definition: Gis.cpp:700
vector< vector< double > > deltaArea
Definition: Pixel.h:108
void setSpModifier(const double &value, const int &ftindex)
Definition: Pixel.h:87
Class to provide a simple integer-integer-string-string key in std maps.
Definition: BaseClass.h:213
void advanceYear()
Definition: Scheduler.h:51
int getPos(const K &element, const vector< K > &v, const int &msgCode_h=MSG_CRITICAL_ERROR)
Definition: BaseClass.h:421
Print an ERROR message, but don&#39;t stop the model.
Definition: BaseClass.h:61
The required data is for the current year.
Definition: BaseClass.h:73
void runBiologicalModule()
computes hV, hArea and new vol at end of year
vector< int > getAllocableProductIdsFromDeathTimber(const int &regId_h, const string &ft, const string &dc, const int &harvesting_year, int request_year=DATA_NOW)
Returns the ids of the primary products that is possible to obtain using the timber recorded death in...
Definition: ModelData.cpp:2105
double getArea(const string &fType_h, const string &dClass_h)
Get area by ft and dc (from pixel->area matrix)
void initialiseDeathBiomassStocks(const vector< double > &deathByFt, const int &regId)
Initialises the stocks of death biomass for the avgLive_* years before the simulation starts...
Definition: Carbon.cpp:169
int getIntSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1105
vector< vector< int > > l2r
vector< vector< double > > area
Definition: Pixel.h:107
vector< string > dClasses
vector< string > getForTypeParents()
Definition: ModelData.cpp:122
vector< vector< double > > cumAlive_exp
This is the expected version of cumAlive, used for calculating profits.
Definition: Pixel.h:141
bool getBoolSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1117
void runManagementModule()
computes regArea and expectedReturns
Do not actually output any message.
Definition: BaseClass.h:57
void initializePixelVolumes()
distribute regional exogenous volumes to pixel volumes using Corine land cover area as weight ...
vector< vector< int > > createCombinationsVector(const int &nItems)
Return a vector containing any possible combination of nItems items (including any possible subset)...
Definition: ModelData.cpp:2055
vector< Pixel * > regPx
void sumRegionalForData()
computes vol, hV, harvestedArea, regArea and expReturns at reg level from the pixel level ...
vector< vector< double > > addMort
Definition: Pixel.h:134
Forest types (struct)
Definition: ModelData.h:293
void spd(const double &value_h, const string &type_h, const int &regId_h, const string &prodId_h, const int &year=DATA_NOW, const bool &allowCreate=false, const string &freeDim_h="") const
string i2s(const int &int_h) const
integer to string conversion
Definition: BaseClass.cpp:219
ThreadManager * MTHREAD
Pointer to the Thread manager.
Definition: BaseClass.h:467
K getMax(const vector< K > &v)
Returns the value of the maximum element in the vector (the last one in case of multiple equivalent m...
Definition: BaseClass.h:391
double getAvailableDeathTimber(const vector< string > &primProd_h, int regId_h, int year_h)
Returns the timber available for a given set of primary products as stored in the deathTimberInventor...
Definition: ModelData.cpp:2076
bool layerExist(const string &layerName_h, bool exactMatch=true) const
Return a pointer to a layer given its name.
Definition: Gis.cpp:552
void changeValue(const string &layerName_h, const double &value_h, const bool &setNoValueForZero=false)
Change the value of an existing layerMTHREAD->GIS->pack(parName, forName, dClass, year)...
Definition: Pixel.cpp:135
double getValue(string layerName, int op=OP_SUM)
return the values of its own pixels for the specified layer. Possible operations: OP_SUM or OP_AVG ...
double calculateAnnualisedEquivalent(const double &amount_h, const int &years_h, const double &ir) const
Calculate the annual equivalent flow.
Definition: ModelData.cpp:1961
ModelData * MD
the model data object
Definition: ThreadManager.h:72
void resetMapValues(map< K, V > &mymap, const V &value)
Reset all values stored in a map to the specified one.
Definition: BaseClass.h:341
void loadExogenousForestLayers(const string &what)
Set pixel volumes (what="vol") OR areas (what="area") by specific forest types as defined in gis laye...
int memType
Definition: ModelData.h:296
double expType
Sampling derived expectation types of this agent (forest bilogical parameters: growth, mortality)
Definition: Pixel.h:145
vector< double > avalCoef
Availability (of wood resources) coefficient. A [0,1] coefficient (new: by forest type) that reduces ...
Definition: Pixel.h:148
vector< string > priProducts
double getAvailableAliveTimber(const vector< string > &primProd_h, int regId_h)
Returns the timber available for a given set of primary products as stored in the px->vol_l vector...
Definition: ModelData.cpp:2124
vector< vector< double > > mort
Definition: Pixel.h:133
void printOptLog(bool optimal, int &nIterations, double &obj)
Definition: Output.cpp:827
Scheduler * SCD
the scheduler object (simulation-loops scheduler)
Definition: ThreadManager.h:75
vector< vector< double > > vMort
Definition: Pixel.h:118
vector< T > positionsToContent(const vector< T > &vector_h, const vector< int > &positions)
Return a vector of content from a vector and a vector of positions (int)
Definition: BaseClass.h:359
vector< vector< double > > vol_l
store the volumes of the previous year
Definition: Pixel.h:129
void msgOut(const int &msgCode_h, const string &msg_h, const bool &refreshGUI_h=true) const
Overloaded function to print the output log.
Definition: BaseClass.cpp:50
vector< double > expectedAnnualIncome_carbon
Definition: Pixel.h:124
Gis * GIS
GIS information and methods.
Definition: ThreadManager.h:73
Thread manager. Responsible for managing the main thread and "speaking" with the GUI.
Definition: ThreadManager.h:65
void incrMapValue(map< K, V > &mymap, const K &key, const V &value, const int &error_level=MSG_CRITICAL_ERROR)
Increments a value stored in a map of the specified value, given the key.
Definition: BaseClass.h:312
vector< string > getStringVectorSetting(const string &name_h, int reg=WORLD) const
Definition: ModelData.cpp:1129
Perform a SUM operation.
Definition: BaseClass.h:77
void resetPixelValues()
swap volumes->lagged_volumes and reset the other pixel vectors
double deathTimberInventory_get(const iisskey &thekey)
Definition: ModelData.h:197
vector< vector< double > > vHa_exp
This is the expected version of vHa, used for calculating profits.
Definition: Pixel.h:140
vector< string > pDClasses
vector< double > initialDc0Area
Definition: Pixel.h:109
vector< vector< double > > cumAlive
Cumulative probability of remaining alive at the beginning of a given diameter class.
Definition: Pixel.h:138
vector< int > getForTypeChilds_pos(const string &forTypeId_h, bool all=false)
Definition: ModelData.cpp:109
int getMaxPos(const vector< K > &v)
Returns the position of the maximum element in the vector (the last one in case of multiple equivalen...
Definition: BaseClass.h:383
void computeInventary()
in=f(vol_t-1)
vector< vector< double > > vHa
Volume at hectar by each diameter class [m^3/ha].
Definition: Pixel.h:137
vector< double > inResByAnyCombination
Vector of inventory resource for each possible combination of primary products. This store both alive...
Definition: ModelRegion.h:86
string getStringSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1113
void runMarketModule()
computes st (supply total) and pw (weighted price). Optimisation inside.
void registerTransports(const double &distQ, const int &regId)
Registers the quantities emitted by transport of wood FROM a given region.
Definition: Carbon.cpp:277
int getNForTypesChilds(const string &forTypeId_h)
Definition: ModelData.cpp:135
void cacheSettings()
just cache exogenous settings from ModelData
Print an error message and stop the model.
Definition: BaseClass.h:62
void registerDeathBiomass(const double &value, const int &regId, const string &fType)
Registers the "death" of a given amount of biomass, storing it in the deathBiomass map...
Definition: Carbon.cpp:243
vector< vector< double > > vMortAdd
Definition: Pixel.h:119
double getSTData(const string &parName, const string &forName, int year, const string &d2="", double naToReturn=RETNA)
Definition: Pixel.cpp:246
Output * DO
data output
Definition: ThreadManager.h:76
vector< int > optDcChosen
Definition: Pixel.h:123
double getDoubleValue(const string &layerName_h, const bool &returnZeroForNoValue=false) const
Return the value for a specific layer.
Definition: Pixel.cpp:158
string d2s(const double &double_h) const
double to string conversion
Definition: BaseClass.cpp:229
vector< double > expectedAnnualIncome_timber
Definition: Pixel.h:125
const double getForData(const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW)
Definition: ModelData.cpp:1281
void updateOtherMapData()
update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
int vSum(const vector< int > &vector_h) const
Definition: BaseClass.h:276
bool getTempBool()
Definition: ModelData.h:145
int getYear()
Definition: Scheduler.h:49
void initialiseDeathTimber()
Set deathTimberInventory to zero for the previous years (under the hypothesis that we don't have advan...
double getPastRegArea(const int &ft_idx, const int &year)
Definition: Pixel.cpp:390
vector< string > fTypes
Pixel-level class.
Definition: Pixel.h:47
vector< vector< double > > hProductivity
Definition: Pixel.h:112
void initialiseProductsStocks(const vector< double > &qByProduct, const int &regId)
Initialises the stocks of products for the avgLive_* years before the simulation starts.
Definition: Carbon.cpp:203
void setTempBool(bool tempBool_h)
Definition: ModelData.h:146
vector< vector< double > > vol
Definition: Pixel.h:90
double getVHaByYear(const Pixel *px, const int &ft, const int &year, const double &extraBiomass_ratio, const int &regId) const
return the Volume/ha in a forest after a given number of year after planting, for a specific forest t...
double getMultiplier(const string &multiplierName, const string &forName, int year=DATA_NOW)
Definition: Pixel.cpp:184
vector< string > allProducts
vector< string > secProducts
vector< vector< double > > cumTp_exp
This is the expected version of cumTp, used for calculating profits.
Definition: Pixel.h:139
double expTypePrices
Sampling derived expectation types of this agent (prices)
Definition: Pixel.h:146
void registerHarvesting(const double &value, const int &regId, const string &fType)
Registers the harvesting of trees increasing the value of cumEmittedForOper.
Definition: Carbon.cpp:225
Print a debug message, normally filtered out.
Definition: BaseClass.h:58
Ipopt::SmartPtr< Ipopt::TNLP > OPT
Market optimisation.
Definition: ThreadManager.h:81
Carbon * CBAL
Module for the Carbon Balance.
Definition: ThreadManager.h:79
vector< vector< vector< double > > > hVol_byPrd
Definition: Pixel.h:113
Print a WARNING message.
Definition: BaseClass.h:60
vector< vector< double > > area_l
store the areas of the previous year
Definition: Pixel.h:130
vector< Pixel * > getAllPlotsByRegion(ModelRegion &region_h, bool shuffle=false)
Return the vector of all plots by a specific region (main region or subregion), optionally shuffled;...
Definition: Gis.cpp:855
string getForTypeParentId(const string &forTypeId_h)
Definition: ModelData.cpp:90
vector< string > getForTypeIds(bool all=false)
By default it doesn&#39;t return forTypes used only as input.
Definition: ModelData.cpp:430
double getSd(const vector< K > &v, bool sample=true)
Definition: BaseClass.h:408
double gpd(const string &type_h, const int &regId_h, const string &prodId_h, const int &year=DATA_NOW, const string &freeDim_h="") const
map< K, V > vectorToMap(const vector< K > &keys, const V &value=0.0)
Returns a map built using the given vector and the given (scalar) value as keys/values pairs...
Definition: BaseClass.h:349
vector< int > optFtChosen
Definition: Pixel.h:122
vector< int > optDc
Definition: Pixel.h:121
vector< double > expectedReturns
Definition: Pixel.h:120
V findMap(const map< K, V > &mymap, const K &key, const int &error_level=MSG_CRITICAL_ERROR, const V &notFoundValue=numeric_limits< V >::min()) const
Lookup a map for a value. Return the value starting from the key.
Definition: BaseClass.h:286
vector< vector< double > > tp
Definition: Pixel.h:135
double getPathMortality(const string &forType, const string &dC, int year=DATA_NOW)
Return the INCREASED mortality due to pathogen presence for a given ft and dc in a certain year (defa...
Definition: Pixel.cpp:329
ModelCoreSpatial(ThreadManager *MTHREAD_h)
double getAvgAgeByDc(Pixel *px, int ft, int dc)
return the average age of a tree at a given diameter class, using the cumTp vector ...
int getErrorLevel()
Definition: ModelData.h:144
void swap(const int &swap_what)
Assign to the delayed value the current values, e.g. vol_l = vol.
Definition: Pixel.cpp:422
string forLayer
Definition: ModelData.h:297
vector< ModelRegion * > getSiblings(bool excludeResidual=true)
Return a vector of pointers to the siblings regions.
Definition: ModelRegion.cpp:85
void printDetailedHV(map< tr1::array< string, 4 >, double > hVol_byPrd)
Definition: Output.cpp:1173
vector< double > expectedReturnsNotCorrByRa
by ft. Attention: reported expReturns at "forest" level (compared with those at forest type level) d...
Definition: Pixel.h:127
vector< vector< double > > hVol
Definition: Pixel.h:111
void incrOrAddMapValue(map< K, V > &mymap, const K &key, const V &value)
Increments a value stored in a map of the specified value, given the key.
Definition: BaseClass.h:325
void initialiseEmissionCounters()
Initialises the emission counters to zero.
Definition: Carbon.cpp:158
void setErrorLevel(int errorLevel_h)
Definition: ModelData.h:143
void initMarketModule()
computes st and pw for second year and several needed-only-at-t0-vars for the market module ...
vector< int > getRegionIds(int level_h, bool excludeResidual=true)
Definition: ModelData.cpp:366
double computeExpectedPrice(const double &curLocPrice, const double &worldCurPrice, const double &worldFutPrice, const double &sl, const double &sa, const double &expCoef)
Compute weighted expected price for a given product.
void print(bool earlyPrint=true)
Print output. If earlyPrinting it doesn&#39;t print some stuff for which we don&#39;t yet have values...
Definition: Output.cpp:426
void HWP_eol2energy()
Computes the energy substitution for the quota of HWP that reachs end of life and doesn&#39;t go to landf...
Definition: Carbon.cpp:288
void initializePixelArea()
compute px->area for each ft and dc
vector< int > regIds2
double getDoubleSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1109
vector< vector< double > > cumTp
This is time of passage to REACH a diameter class (while the exogenous tp by diameter class is the ti...
Definition: Pixel.h:136
vector< vector< double > > hArea
Definition: Pixel.h:110
void sfd(const double &value_h, const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW, const bool &allowCreate=false) const
string getRegSName() const
Definition: ModelRegion.h:67
vector< double > vReg
Definition: Pixel.h:117
double gfd(const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW) const
bool app(const string &prod_h, const string &forType_h, const string &dClass_h) const
vector< vector< double > > beta
Definition: Pixel.h:132
vector< Pixel * > getMyPixels()
Definition: ModelRegion.h:86
Print an INFO message.
Definition: BaseClass.h:59
void assignSpMultiplierPropToVols()
ModelCoreSpatial::assignSpMultiplierPropToVols assigns the spatial multiplier (used in the time of re...
ModelRegion * getRegion(int regId_h)
Definition: ModelData.cpp:346
void cacheDynamicSettings()
cache settings that may change with time
void initialiseCarbonModule()
call initialiseDeathBiomassStocks(), initialiseProductsStocks() and initialiseEmissionCounters() ...
void cachePixelExogenousData()
computes pixel level tp, meta and mort
double getAvg(const vector< K > &v)
Returns the average of the elements in the vector.
Definition: BaseClass.h:399
#define DIAM_ALL
All diameter classes.
Definition: BaseClass.h:157
void updateMapAreas()
computes forArea_{ft}
void deathTimberInventory_incrOrAdd(const iisskey &thekey, double value_h)
Definition: ModelData.h:195
void registerCarbonEvents()
call registerHarvesting(), registerDeathBiomass(), registerProducts() and registerTransports() ...
vector< string > getForTypeChilds(const string &forTypeId_h)
Definition: ModelData.cpp:98
void computeCumulativeData()
computes cumTp_exp, vHa_exp, vHa
void registerProducts(const double &value, const int &regId, const string &productName)
Registers the production of a given amount of products, storing it in the products maps...
Definition: Carbon.cpp:257
double getID() const
Definition: Pixel.h:67
void printDebugInitRegionalValues()
print initial inv, st, sl and sa in each region
void computeEconomicBalances()
compute the policy balances (- -> costs; + -> rev) and the producer/consumer surpluses ...
forType * getForType(int position)
Definition: ModelData.h:128
map< int, vector< double > > regArea
Definition: Pixel.h:114
const int getMaxYearUsableDeathTimber(const string &prod_h, const string &forType_h, const string &dClass_h)
Definition: ModelData.cpp:468
vector< double > allocateHarvesting(vector< double > total_st, const int &regId)
Using the deathTimberInventory map, this function allocate the total st in st from death timber (that...