FFSM++  1.1.0
French Forest Sector Model ++
ModelCoreSpatial.cpp
Go to the documentation of this file.
1 /***************************************************************************
2  * Copyright (C) 2015 by Laboratoire d'Economie Forestière *
3  * http://ffsm-project.org *
4  * *
5  * This program is free software; you can redistribute it and/or modify *
6  * it under the terms of the GNU General Public License as published by *
7  * the Free Software Foundation; either version 3 of the License, or *
8  * (at your option) any later version, given the compliance with the *
9  * exceptions listed in the file COPYING that is distributed together *
10  * with this file. *
11  * *
12  * This program is distributed in the hope that it will be useful, *
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
15  * GNU General Public License for more details. *
16  * *
17  * You should have received a copy of the GNU General Public License *
18  * along with this program; if not, write to the *
19  * Free Software Foundation, Inc., *
20  * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
21  ***************************************************************************/
22 #include <cmath>
23 #include <algorithm>
24 #include <tr1/array>
25 
26 
27 #include "IpIpoptApplication.hpp"
28 #include "IpSolveStatistics.hpp"
29 
30 #include "ModelCoreSpatial.h"
31 #include "ModelData.h"
32 #include "ThreadManager.h"
33 #include "Opt.h"
34 #include "Scheduler.h"
35 #include "Gis.h"
36 #include "Carbon.h"
37 
38 
40  MTHREAD = MTHREAD_h;
41 }
42 
44 
45 }
46 
47 void
49  cacheSettings(); ///< cache things like first year, second year, dClasses...
50  cacheDynamicSettings(); // cache settings that may change every year
51  initializePixelVolumes(); ///< compute px volumes vol for 2005 (including exogenous loaded volumes)
52  assignSpMultiplierPropToVols(); // assign the spatial multiplier (used in the time of return) based no more on a Normal distribution but on the volumes present in the pixel: more volume, more the pixel is fit for the ft
53  initMarketModule(); ///< inside it uses first year, second year
55  MTHREAD->DO->print(true);
56  MTHREAD->SCD->advanceYear(); ///< 2005->2006
57  int thisYear = MTHREAD->SCD->getYear(); // for debugging
58  resetPixelValues(); ///< swap volumes->lagged_volumes and reset the other pixel vectors
59  cachePixelExogenousData(); ///< compute pixel tp, meta and mort
60  computeInventary(); ///< in=f(vol_t-1)
61  //printDebugInitRegionalValues();
62  computeCumulativeData(); ///< compute cumTp_exp, vHa_exp, vHa
63  initializePixelArea(); ///< compute px->area for each ft and dc (including exogenous loaded areas)
66  updateMapAreas(); ///< update the forArea_{ft} layer on each pixel as old value-hArea+regArea
67  updateOtherMapData(); ///< update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
68  sumRegionalForData(); ///< only for printing stats as forest data is never used at regional level
70  //MTHREAD->DO->printDebugPixelValues(); // uncomment to enable pixel-level debugging // moved to output->print
71 
72  MTHREAD->DO->print();
73 }
74 
75 void
77  int thisYear = MTHREAD->SCD->getYear(); // for debugging
78  cacheDynamicSettings(); // cache settings that may change every year
79  resetPixelValues(); // swap volumes->lagged_volumes and reset the other pixel vectors
80  cachePixelExogenousData(); // compute pixel tp, meta and mort
81  computeInventary(); // in=f(vol_t-1)
82  runMarketModule(); // RUN THE MARKET OPTIMISATION HERE
83  computeCumulativeData(); // compute cumTp_exp, vHa_exp
87  //MTHREAD->DO->printDebugPixelValues(); // uncomment to enable pixel-level debugging // moved to output->print
89  updateOtherMapData(); // update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
90  sumRegionalForData(); // only for printing stats as forest data is never used at regional level
92  computeEconomicBalances(); // policy costs and welfare analysis
93  MTHREAD->DO->print(false);
94 }
95 
96 void
98  msgOut(MSG_INFO, "Starting market module (init stage)..");
99 
100  for(uint i=0;i<regIds2.size();i++){
101  int r2 = regIds2[i];
102  //RPAR('pl',i,p_tr,t-1) = sum(p_pr, a(p_pr,p_tr)*RPAR('pl',i,p_pr,t-1))+m(i,p_tr);
103  for(uint sp=0;sp<secProducts.size();sp++){
104  double value = 0;
105  for (uint pp=0;pp<priProducts.size();pp++){
106  value += gpd("pl",r2,priProducts[pp],secondYear)*
107  gpd("a",r2,priProducts[pp],secondYear,secProducts[sp]);
108  }
109  value += (gpd("m",r2,secProducts[sp],secondYear)* gpd("pol_trSub",r2,secProducts[sp],secondYear));
110  spd(value,"pl",r2,secProducts[sp],secondYear,true);
111  }
112  // RPAR('dl',i,p_pr,t-1) = sum(p_tr, a(p_pr,p_tr)*RPAR('sl',i,p_tr,t-1));
113  for (uint pp=0;pp<priProducts.size();pp++){
114  double value=0;
115  for(uint sp=0;sp<secProducts.size();sp++){
116  value += gpd("sl",r2,secProducts[sp],secondYear)*
117  gpd("a",r2,priProducts[pp],secondYear, secProducts[sp]);
118  }
119  spd(value,"dl",r2,priProducts[pp],secondYear,true);
120  double stvalue = gpd("sl",r2,priProducts[pp],secondYear)
121  + gpd("sa",r2,priProducts[pp],secondYear);
122  spd(stvalue,"st",r2,priProducts[pp],secondYear,true);
123  double supply_fromForestShare = gpd("supply_fromForestShare",r2,priProducts[pp],secondYear);
124  spd(stvalue*supply_fromForestShare,"st_fromHarv",r2,priProducts[pp],secondYear,true); // needed for the biological module in year starty+1
125  }
126  // RPAR('st',i,prd,t-1) = RPAR('sl',i,prd,t-1)+RPAR('sa',i,prd,t-1);
127  // RPAR('dt',i,prd,t-1) = RPAR('dl',i,prd,t-1)+RPAR('da',i,prd,t-1);
128  for (uint ap=0;ap<allProducts.size();ap++){
129  //double debug = gpd("dl",r2,allProducts[ap],secondYear);
130  double dtvalue = gpd("dl",r2,allProducts[ap],secondYear)
131  + gpd("da",r2,allProducts[ap],secondYear);
132 
133  spd(dtvalue,"dt",r2,allProducts[ap],secondYear,true);
134  }
135 
136  // q1(i,p_tr) = 1/(1+((RPAR('dl',i,p_tr,t-1)/RPAR('da',i,p_tr,t-1))**(1/psi(i,p_tr)))*(RPAR('pl',i,p_tr,t-1)/PT(p_tr,t-1)));
137  // p1(i,p_tr) = 1-q1(i,p_tr);
138  // RPAR('dc',i,p_tr,t-1) = (q1(i,p_tr)*RPAR('da',i,p_tr,t-1)**((psi(i,p_tr)-1)/psi(i,p_tr))+ p1(i,p_tr)*RPAR('dl',i,p_tr,t-1)**((psi(i,p_tr)-1)/psi(i,p_tr)))**(psi(i,p_tr)/(psi(i,p_tr)-1));
139  // RPAR('pc',i,p_tr,t-1) = (RPAR('da',i,p_tr,t-1)/RPAR('dc',i,p_tr,t-1))*PT(p_tr,t-1)+(RPAR('dl',i,p_tr,t-1)/RPAR('dc',i,p_tr,t-1))*RPAR('pl',i,p_tr,t-1);
140  // RPAR('pc',i,p_pr,t-1) = (RPAR('sa',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*PT(p_pr,t-1)+(RPAR('sl',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*RPAR('pl',i,p_pr,t-1);
141  // RPAR('pw',i,p_tr,t-1) = (RPAR('dl',i,p_tr,t-1)*RPAR('pl',i,p_tr,t-1)+RPAR('da',i,p_tr,t-1)*PT(p_tr,t-1))/RPAR('dt',i,p_tr,t-1) ; //changed 20120419
142  // K(i,p_tr,t-1) = k1(i,p_tr)*RPAR('sl',i,p_tr,t-1);
143  for(uint sp=0;sp<secProducts.size();sp++){
144  double psi = gpd("psi",r2,secProducts[sp],secondYear);
145  double dl = gpd("dl",r2,secProducts[sp],secondYear);
146  double da = gpd("da",r2,secProducts[sp],secondYear);
147  double pl = gpd("pl",r2,secProducts[sp],secondYear);
148  double sa = gpd("sa",r2,secProducts[sp],secondYear);
149  double sl = gpd("sl",r2,secProducts[sp],secondYear);
150  double k1 = gpd("k1",r2,secProducts[sp],secondYear);
151  double pWo = gpd("pl",WL2,secProducts[sp],secondYear); // World price (local price for region 99999)
152 
153  double stvalue = sl+sa;
154  double q1 = 1/ ( 1+pow(dl/da,1/psi)*(pl/pWo) );
155  double p1 = 1-q1;
156  double dc = pow(
157  q1*pow(da,(psi-1)/psi) + p1*pow(dl,(psi-1)/psi)
158  ,
159  psi/(psi-1)
160  );
161  double pc = (da/dc)*pWo
162  +(dl/dc)*pl;
163  double pw = (dl*pl+da*pWo)/(dl+da);
164  double k = k1*sl;
165 
166  spd(stvalue,"st",r2,secProducts[sp],secondYear,true);
167  spd(q1,"q1",r2,secProducts[sp],firstYear,true);
168  //spd(p1,"p1",r2,secProducts[sp],firstYear,true);
169  spd(dc,"dc",r2,secProducts[sp],secondYear,true);
170  spd(pc,"pc",r2,secProducts[sp],secondYear,true);
171  spd(pw,"pw",r2,secProducts[sp],secondYear,true);
172  spd(k,"k",r2,secProducts[sp],secondYear,true);
173  }
174 
175  // t1(i,p_pr) = 1/(1+((RPAR('sl',i,p_pr,t-1)/RPAR('sa',i,p_pr,t-1))**(1/eta(i,p_pr)))*(RPAR('pl',i,p_pr,t-1)/PT(p_pr,t-1)));
176  // r1(i,p_pr) = 1-t1(i,p_pr);
177  // RPAR('sc',i,p_pr,t-1) = (t1(i,p_pr)*RPAR('sa',i,p_pr,t-1)**((eta(i,p_pr)-1)/eta(i,p_pr))+ r1(i,p_pr)*RPAR('sl',i,p_pr,t-1)**((eta(i,p_pr)-1)/eta(i,p_pr)))**(eta(i,p_pr)/(eta(i,p_pr)-1))
178  // RPAR('pc',i,p_pr,t-1) = (RPAR('sa',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*PT(p_pr,t-1)+(RPAR('sl',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*RPAR('pl',i,p_pr,t-1);
179  // RPAR('pw',i,p_pr,t-1) = (RPAR('sl',i,p_pr,t-1)*RPAR('pl',i,p_pr,t-1)+RPAR('sa',i,p_pr,t-1)*PT(p_pr,t-1))/RPAR('st',i,p_pr,t-1) ; //changed 20120419
180  for(uint pp=0;pp<priProducts.size();pp++){
181 
182  double sl = gpd("sl",r2,priProducts[pp],secondYear);
183  double sa = gpd("sa",r2,priProducts[pp],secondYear);
184  double eta = gpd("eta",r2,priProducts[pp],secondYear);
185  double pl = gpd("pl",r2,priProducts[pp],secondYear);
186  double pWo = gpd("pl",WL2,priProducts[pp],secondYear); // World price (local price for region 99999)
187 
188 
189  double t1 = 1/ ( 1+(pow(sl/sa,1/eta))*(pl/pWo) );
190  double r1 = 1-t1;
191  double sc = pow(
192  t1*pow(sa,(eta-1)/eta) + r1*pow(sl,(eta-1)/eta)
193  ,
194  eta/(eta-1)
195  );
196  double pc = (sa/sc)*pWo+(sl/sc)*pl;
197  double pw = (sl*pl+sa*pWo)/(sl+sa);
198 
199  spd(t1,"t1",r2,priProducts[pp],firstYear,true);
200  //spd(r1,"r1",r2,priProducts[pp],firstYear,true);
201  spd(sc,"sc",r2,priProducts[pp],secondYear,true);
202  spd(pc,"pc",r2,priProducts[pp],secondYear,true);
203  spd(pw,"pw",r2,priProducts[pp],secondYear,true);
204  }
205 
206  // up to here tested with gams output on 20120628, that's fine !!
207  } // end for each region in level 2
208 
209 
210  // initializing the exports to zero quantities
211  // initializing of the transport cost for the same region to one and distance to zero
212  for(uint r1=0;r1<l2r.size();r1++){
213  for(uint r2=0;r2<l2r[r1].size();r2++){
214  for(uint p=0;p<allProducts.size();p++){
215  for(uint r2To=0;r2To<l2r[r1].size();r2To++){
216  spd(0,"rt",l2r[r1][r2],allProducts[p],secondYear,true,i2s(l2r[r1][r2To])); // regional trade, it was exp in gams
217  if(l2r[r1][r2] == l2r[r1][r2To]){
218  spd(1,"ct",l2r[r1][r2],allProducts[p],firstYear,true,i2s(l2r[r1][r2To])); // as long as this value is higher than zero, rt within the same region is not chosen by the solver, so the value doesn't really matter. If it is zero, the solver still works and results are the same, but reported rt within the region are crazy high (100000)
219  }
220  }
221  } // end each product
222 
223  for(uint r2To=0;r2To<l2r[r1].size();r2To++){
224  if(l2r[r1][r2] == l2r[r1][r2To]){
225  spd(0,"dist",l2r[r1][r2],"",firstYear,true,i2s(l2r[r1][r2To])); // setting distance zero in code, so no need to put it in the data
226  }
227  }
228  } // end of r2 regions
229  } // end of r1 region
230 }
231 
232 void
234  msgOut(MSG_INFO, "Starting market module");
235  static double cumOverHarvesting = 0.0;
236  int thisYear = MTHREAD->SCD->getYear();
237  int previousYear = MTHREAD->SCD->getYear()-1;
238 
239  // Added for carbon poject -------------------------------
240  double intRate = MTHREAD->MD->getDoubleSetting("ir");
241 
242  // ----------------------------------------------------------------
243 
244  // *** PRE-OPTIMISATION YEARLY OPERATIONS..
245  for(uint i=0;i<regIds2.size();i++){
246  int r2 = regIds2[i];
247  for(uint sp=0;sp<secProducts.size();sp++){
248  double g1 = gpd("g1",r2,secProducts[sp],previousYear);
249  double sigma = gpd("sigma",r2,secProducts[sp]);
250  double pc_1 = gpd("pc",r2,secProducts[sp],previousYear);
251  double dc_1 = gpd("dc",r2,secProducts[sp],previousYear);
252  double k_1 = gpd("k",r2,secProducts[sp],previousYear);
253  double pol_mktDirInt_d = gpd("pol_mktDirInt_d",r2,secProducts[sp]);
254  double pol_mktDirInt_d_1 = gpd("pol_mktDirInt_d",r2,secProducts[sp],previousYear);
255  double q1 = gpd("q1",r2,secProducts[sp]);
256  double pol_mktStr_d = gpd("pol_mktStr_d",r2,secProducts[sp]);
257 
258  double q1_polCorr = min(1.0, (q1-0.5)*pol_mktStr_d+0.5); // The pol_mktStr_s coefficient makes t1 (the "b" weight in the CES function) converge toward 0.5 as pol_mktStr_s goes to 0. As pol_mktStr_s can be above 1, an upper cap of unity is imposed to t1
259 
260 
261 
262  double k = (1+g1)*k_1;
263  double aa = (sigma/(sigma+1))*pc_1*pow(dc_1,-1/sigma) * pol_mktDirInt_d_1/pol_mktDirInt_d ;
264  double gg = dc_1*pow(pc_1*pol_mktDirInt_d_1,-sigma); //alpha
265 
266  spd(k, "k" ,r2,secProducts[sp]);
267  spd(aa,"aa",r2,secProducts[sp],DATA_NOW,true);
268  spd(gg,"gg",r2,secProducts[sp],DATA_NOW,true);
269  spd(q1_polCorr,"q1_polCorr",r2,secProducts[sp],DATA_NOW,true);
270 
271  }
272 
273  // BB(i,p_pr) = (sigma(p_pr)/(sigma(p_pr)+1))*RPAR('pc',i,p_pr,t-1)*(RPAR('sc',i,p_pr,t-1)**(-1/sigma(p_pr)))*(In(i,p_pr,t-1)/In(i,p_pr,t))**(gamma(p_pr)/sigma(p_pr));
274  // FF(i,p_pr) = RPAR('sc',i,p_pr,t-1)*((RPAR('pc',i,p_pr,t-1))**(-sigma(p_pr)))*(In(i,p_pr,t)/In(i,p_pr,t-1))**(gamma(p_pr)); //chi
275  for(uint pp=0;pp<priProducts.size();pp++){
276  double gamma_incr = gpd("gamma_incr",r2,priProducts[pp]); // elast supply to increasing stocks
277  double gamma_decr = gpd("gamma_decr",r2,priProducts[pp]); // elast supply to decreasing stocks
278  double sigma = gpd("sigma",r2,priProducts[pp]); // elast supply to price
279  double pc_1 = gpd("pc",r2,priProducts[pp],previousYear);
280  double sc_1 = gpd("sc",r2,priProducts[pp],previousYear);
281  double in = gpd("in",r2,priProducts[pp])+gpd("in_deathTimber",r2,priProducts[pp]);
282  double in_1 = gpd("in",r2,priProducts[pp],previousYear)+gpd("in_deathTimber",r2,priProducts[pp],previousYear);
283  double pol_mktDirInt_s = gpd("pol_mktDirInt_s",r2,priProducts[pp]);
284  double pol_mktDirInt_s_1 = gpd("pol_mktDirInt_s",r2,priProducts[pp],previousYear);
285  double t1 = gpd("t1",r2,priProducts[pp]);
286  double pol_mktStr_s = gpd("pol_mktStr_s",r2,priProducts[pp]);
287 
288 
289  // Added for carbon project ------------------------------------------
290  double carbonPrice_1 = gpd("carbonPrice",r2,"",previousYear); //using dummy region until Anto solves issue // Antonello: solved but also generalised
291  double omega = gpd("co2content_products", r2, priProducts[pp])/1000; //kg to tons
292  double Pct_1 = carbonPrice_1*intRate;
293 
294  // -------------------------------------------------------------------
295 
296 
297  double t1_polCorr = min(1.0, (t1-0.5)*pol_mktStr_s+0.5); // The pol_mktStr_s coefficient makes t1 (the "b" weight in the CES function) converge toward 0.5 as pol_mktStr_s goes to 0. As pol_mktStr_s can be above 1, an upper cap of unity is imposed to t1
298 
299 
300  double gamma = in>in_1 ? gamma_incr: gamma_decr; // If inventories resources are decreasing we use a given elasticity, if they are increasing we use an other one
301 
302  double in_ratio = in/in_1;
303 
304  double bb = (sigma/(sigma+1.0))*pc_1*pow(sc_1,-1.0/sigma)*pow(1/in_ratio,gamma/sigma)*pow(1.0,1.0/sigma) * pol_mktDirInt_s_1/pol_mktDirInt_s ;
305 
306  /* Old FF (without carbon)
307  double ff = sc_1*pow(pc_1*pol_mktDirInt_s_1,-sigma)*pow(in_ratio,gamma); //chi
308  */
309 
310  // Added for carbon project
311  double ff = sc_1*pow((pc_1 - omega * Pct_1)*pol_mktDirInt_s_1,-sigma)*pow(in_ratio,gamma);
312  // -----------------------------------------------
313 
314  spd(bb,"bb",r2,priProducts[pp],DATA_NOW,true);
315  spd(ff,"ff",r2,priProducts[pp],DATA_NOW,true);
316  spd(sigma,"sigma",r2,priProducts[pp],DATA_NOW,true);
317  spd(t1_polCorr,"t1_polCorr",r2,priProducts[pp],DATA_NOW,true);
318 
319  }
320 
321  } // end for each region in level 2 (and updating variables)
322 
323 
324 
325  // *** OPTIMISATION....
326 
327  // Create an instance of the IpoptApplication
328  //Opt *OPTa = new Opt(MTHREAD);
329  //SmartPtr<TNLP> OPTa = new Opt(MTHREAD);
330  SmartPtr<IpoptApplication> application = new IpoptApplication();
331  string linearSolver = MTHREAD->MD->getStringSetting("linearSolver",DATA_NOW);
332  application->Options()->SetStringValue("linear_solver", linearSolver); // default in ipopt is ma27
333  //application->Options()->SetStringValue("hessian_approximation", "limited-memory"); // quasi-newton approximation of the hessian
334  //application->Options()->SetIntegerValue("mumps_mem_percent", 100);
335  application->Options()->SetNumericValue("obj_scaling_factor", -1); // maximisation
336  application->Options()->SetNumericValue("max_cpu_time", 1800); // max 1/2 hour to find the optimus for one single year
337  application->Options()->SetStringValue("check_derivatives_for_naninf", "yes");
338  //application->Options()->SetStringValue("halt_on_ampl_error", "yes"); // not a valid option
339  //application->Options()->SetStringValue("print_options_documentation", "yes"); // print the ipopt options (hundreds of pages!)
340  // Relaxing tolerance options... worse effect :-(
341  //application->Options()->SetNumericValue("tol", 1e-07);
342  //application->Options()->SetNumericValue("constr_viol_tol",0.001);
343  //application->Options()->SetNumericValue("compl_inf_tol",0.001);
344  //application->Options()->SetNumericValue("acceptable_tol", 1e-05);
345  //application->Options()->SetNumericValue("acceptable_dual_inf_tol", 1e+9);
346  //application->Options()->SetNumericValue("acceptable_constr_viol_tol", 0.1);
347  //application->Options()->SetNumericValue("acceptable_compl_inf_tol", 0.1);
348 
349 
350  // Initialize the IpoptApplication and process the options
351  ApplicationReturnStatus status;
352  status = application->Initialize();
353  if (status != Solve_Succeeded) {
354  printf("\n\n*** Error during initialization!\n");
355  msgOut(MSG_INFO,"Error during initialization! Do you have the solver compiled for the specified linear solver?");
356  return;
357  }
358 
359  msgOut(MSG_INFO,"Running optimisation problem for this year (it may take a few minutes for large models)..");
360  status = application->OptimizeTNLP(MTHREAD->OPT);
361 
362 
363  // *** POST OPTIMISATION....
364 
365  // post-equilibrium variables->parameters assignments..
366  // RPAR(type,i,prd,t) = RVAR.l(type,i,prd);
367  // EX(i,j,prd,t) = EXP.l(i,j,prd);
368  // ObjT(t) = Obj.l ;
369  // ==> in Opt::finalize_solution()
370 
371  // Retrieve some statistics about the solve
372  if (status == Solve_Succeeded) {
373  Index iter_count = application->Statistics()->IterationCount();
374  Number final_obj = application->Statistics()->FinalObjective();
375  printf("\n*** The problem solved year %d in %d iterations!\n", thisYear, iter_count);
376  printf("\n*** The final value of the objective function is %e.\n", final_obj);
377  msgOut(MSG_INFO, "The problem solved successfully year "+i2s(thisYear)+" in "+i2s(iter_count)+" iterations.");
378  int icount = iter_count;
379  double obj = final_obj;
380  MTHREAD->DO->printOptLog(true, icount, obj);
381  } else {
382  //Number final_obj = application->Statistics()->FinalObjective();
383  int thisYear = MTHREAD->SCD->getYear();
384  cout << "***ERROR: MODEL DIDN'T SOLVE FOR THIS YEAR ("<<thisYear<<")"<< endl;
385  msgOut(MSG_CRITICAL_ERROR, "Model DIDN'T SOLVE for this year ("+i2s(thisYear)+")");
386  // IMPORTANT! Don't place the next two lines above the msgOut() function or it will crash in windows if the user press the stop button
387  //Index iter_count = application->Statistics()->IterationCount(); // syserror if model doesn't solve
388  //Number final_obj = application->Statistics()->FinalObjective();
389  int icount = 0;
390  double obj = 0;
391  MTHREAD->DO->printOptLog(false, icount, obj);
392  }
393 
394  for(uint r2= 0; r2<regIds2.size();r2++){ // you can use r2<=regIds2.size() to try an out-of range memory error that is not detected other than by valgrind (with a message "Invalid read of size 4 in ModelCore::runSimulationYear() in src/ModelCore.cpp:351")
395  int regId = regIds2[r2];
396  ModelRegion* REG = MTHREAD->MD->getRegion(regId);
397 
398  // *** st adjustments:***
399  // st_or[pp] -> original st from the mkt module
400  // st_fromFor[pp] -> cover the volumes from both alive trees and death ones. This should be bounded by inResByAnyCombination, that already includes death resources
401  // st_fromHarv[pp] -> from harvesting. This should be equal to hV and IGN data
402  // st
403  // st_or = st
404  // st_fromFor = st_or * a
405  // st_fromFor > in:
406  // (overharvesting)
407  // overHarv = st_fromFor - in
408  // st_fromFor = in
409  // st = st_fromFor/a
410  // st_fromHarv = distribute(st_fromFor)
411 
412  // // total supply and total demand..
413  // RPAR('st',i,prd,t) = RPAR('sl',i,prd,t)+RPAR('sa',i,prd,t);
414  // RPAR('dt',i,prd,t) = RPAR('dl',i,prd,t)+RPAR('da',i,prd,t);
415  // // weighted prices.. //changed 20120419
416  // RPAR('pw',i,p_tr,t) = (RPAR('dl',i,p_tr,t)*RPAR('pl',i,p_tr,t)+RPAR('da',i,p_tr,t)*PT(p_tr,t))/RPAR('dt',i,p_tr,t) ; //changed 20120419
417  // RPAR('pw',i,p_pr,t) = (RPAR('sl',i,p_pr,t)*RPAR('pl',i,p_pr,t)+RPAR('sa',i,p_pr,t)*PT(p_pr,t))/RPAR('st',i,p_pr,t) ; //changed 20120419
418  for (uint p=0;p<allProducts.size();p++){
419  double st = gpd("sl",regId,allProducts[p])+gpd("sa",regId,allProducts[p]);
420  double dt = gpd("dl",regId,allProducts[p])+gpd("da",regId,allProducts[p]);
421  spd(st,"st",regId,allProducts[p]);
422  spd(st,"st_or",regId,allProducts[p],DATA_NOW,true); // original total supply, not corrected by resetting it to min(st, inv).
423  spd(dt,"dt",regId,allProducts[p]);
424  }
425  for (uint p=0;p<secProducts.size();p++){
426  double dl = gpd("dl",regId,secProducts[p]);
427  double pl = gpd("pl",regId,secProducts[p]);
428  double da = gpd("da",regId,secProducts[p]); // bug corrected 20120913
429  double pworld = gpd("pl", WL2,secProducts[p]);
430  double dt = gpd("dt",regId,secProducts[p]);
431  double pw = dt?(dl*pl+da*pworld)/dt:0.0;
432  spd(pw,"pw",regId,secProducts[p]);
433  }
434  for (uint p=0;p<priProducts.size();p++){
435  double sl = gpd("sl",regId,priProducts[p]);
436  double pl = gpd("pl",regId,priProducts[p]);
437  double sa = gpd("sa",regId,priProducts[p]); // bug corrected 20120913
438  double pworld = gpd("pl", WL2,priProducts[p]);
439  double st = gpd("st",regId,priProducts[p]);
440  double st_fromFor = st * gpd("supply_fromForestShare",regId,priProducts[p]);
441  double pw = st?(sl*pl+sa*pworld)/st:0.0;
442  spd(pw,"pw",regId,priProducts[p]);
443  spd(st_fromFor,"st_fromFor",regId,priProducts[p],DATA_NOW,true);
444  }
445 
446  // Correcting st if this is over the in
447 
448  // Create a vector with all possible combinations of primary products
449  vector<vector<int>> priPrCombs = MTHREAD->MD->createCombinationsVector(priProducts.size());
450  int nPriPrCombs = priPrCombs.size();
451 
452  for (uint i=0;i<priPrCombs.size();i++){
453  double stMkMod = 0.0;
454  double sumIn = REG->inResByAnyCombination[i];
455  // double sumIn2 = 0.0;
456  for (uint p=0;p<priPrCombs[i].size();p++){
457  stMkMod += gpd("st_fromFor",regId,priProducts[priPrCombs[i][p]]);
458  //sumIn2 += gpd("in",regId,priProducts[priPrCombs[i][p]]);
459  }
460 
461  //if(sumIn<=0.00001){
462  // for (uint p=0;p<priPrCombs[i].size();p++){
463  // spd(0.0,"st",regId,priProducts[priPrCombs[i][p]]);
464  // }
465  // } else {
466  if(stMkMod>sumIn){ // if we harvested more than available
467  string pProductsInvolved = "";
468  for (uint p=0;p<priPrCombs[i].size();p++){
469  pProductsInvolved += (priProducts[priPrCombs[i][p]]+"; ");
470  }
471  double inV_over_hV_ratio = stMkMod ? sumIn/stMkMod : 0.0;
472  cumOverHarvesting += (stMkMod-sumIn);
473  msgOut(MSG_DEBUG, "Overharvesting has happened. Year: "+i2s(thisYear)+ "Region: "+i2s(regId)+"Involved products: "+pProductsInvolved+". sumIn: "+d2s(sumIn)+" stMkMod:" +d2s(stMkMod) + " cumOverHarvesting: "+d2s(cumOverHarvesting));
474  for (uint p=0;p<priPrCombs[i].size();p++){
475  double st_fromFor_orig = gpd("st_fromFor",regId,priProducts[priPrCombs[i][p]]);
476  spd(st_fromFor_orig*inV_over_hV_ratio,"st_fromFor",regId,priProducts[priPrCombs[i][p]]);
477  }
478  }
479 
480  //}
481 
482 
483  }
484 
485  // here we create st_fromHarv as st_fromFor - st_from_deathbiomass
486  vector <double> total_st(priProducts.size(),0.);
487  vector <double> in_deathTimber(priProducts.size(),0.);
488  vector <double> in_aliveForest (priProducts.size(),0.);
489  for (uint i=0;i<priProducts.size();i++){
490  total_st[i] = gpd("st_fromFor",regId,priProducts[i]);
491  in_deathTimber[i] = gpd("in_deathTimber",regId,priProducts[i]);
492  in_aliveForest[i] = gpd("in",regId,priProducts[i]);
493  }
494 
495  vector <double> st_fromHarv= allocateHarvesting(total_st, regId);
496 
497  for (uint i=0;i<priProducts.size();i++){
498  spd(st_fromHarv[i],"st_fromHarv",regId,priProducts[i],DATA_NOW,true);
499  }
500 
501  } // end of each region
502  if (cumOverHarvesting>0.0){
503  msgOut(MSG_DEBUG, "Overharvesting is present. Year: "+i2s(thisYear)+" cumOverHarvesting: "+d2s(cumOverHarvesting));
504  }
505 }
506 
507 
508 /**
509  * @brief ModelCoreSpatial::runBiologicalModule
510  *
511  * Changes in Area:
512  * dc area_l area diff
513  * 0 ---------> +regArea -areaFirstProdClass (areaMovingUp_00)
514  * 15 ---------> +areaFirstPrClass -hArea_15 -areaMovingUp_15
515  * 25 ---------> +areaMovingUp15 - hArea_25 - areaMovingUp_25
516  * 35 ---------> +areaMovingUp25 - hArea_35 - areaMovingUp_35
517  * ...
518  * 95 ---------> +areaMovingUp85 - hArea_95 - areaMovingUp_95
519  * 105 ---------> +areaMovingUp95 - hArea_105
520  *
521  * note: regArea is computed in the management module, not here. Further, regArea is already the net one of forest area changes
522  */
523 void
525 
526  msgOut(MSG_INFO, "Starting resource module..");
527  int thisYear = MTHREAD->SCD->getYear();
528  bool useDeathTimber = MD->getBoolSetting("useDeathTimber",DATA_NOW);
529 
530  for(uint i=0;i<regIds2.size();i++){
531  int r2 = regIds2[i];
532  int regId = r2;
533  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
534  //Gis* GIS = MTHREAD->GIS;
535  regPx = REG->getMyPixels();
536  double shareMortalityUsableTimber = 0.0;
537 
538  for (uint p=0;p<regPx.size();p++){
539  Pixel* px = regPx[p];
540 
541  double pxId = px->getID();
542  //if (pxId == 3550.0){
543  // cout << "got the pixel" << endl;
544  //}
545  //px->expectedReturns.clear();
546  for(uint j=0;j<fTypes.size();j++){
547  string ft = fTypes[j];
548  //double pxArea_debug = px->getDoubleValue("forArea_"+ft, true);
549  vector <double> hV_byDiam;
550  vector <double> productivity_byDc; // mc/ha/y
551  vector < vector <double> > hV_byDiamAndPrd;
552  vector <double> hArea_byDc;
553  vector <double> newVol_byDiam;
554  vector <double> vMort_byDc;
555  vector <double> areasMovingUp(dClasses.size(), 0.0);
556  double areaFirstProdClass;
557 
558 
559  // A - COMPUTING THE REGENERATION..
560  // if we are in a year where the time of passage has not yet been reached
561  // for the specific i,e,l then we use the exogenous Vregen, otherwise we
562  // calculate it
563  //if ( not scen("fxVreg") ,
564  // loop( (i,essence,lambda),
565  // if( ord(t)>=(tp_u1(i,essence,lambda)+2),
566  // Vregen(i,lambda,essence,t)=regArea(i,essence,lambda,t-tp_u1(i,essence,lambda))*volHa_u1(i,essence,lambda)/1000000 ;
567  // );
568  // );
569  //);
570  int tp_u0 = px->tp.at(j).at(0); // time of passage to reach the first production diameter class // bug 20140318, added ceil. 20140318 removed it.. model did go crazy with it
571  if(thisYear == secondYear){
572  px->initialDc0Area.push_back(px->area_l.at(j).at(0));
573  }
574  if(regType != "fixed" && (thisYear-secondYear) >= tp_u0 ) { // T.O.D.O to be checked -> 20121109 OK
575  double pastRegArea = px->getPastRegArea(j,thisYear-tp_u0);
576  double availableArea = px->area_l.at(j).at(0);
577  //double entryVolHa = gfd("entryVolHa",regId,ft,"");
578  double vHa = px->vHa.at(j).at(1);
579  //attention: at times this could take the wrong pastRegArea if tp changes too suddenly as in some "strange" scenarios
580  if (oldVol2AreaMethod){
581  areaFirstProdClass = pastRegArea;
582  } else {
583  areaFirstProdClass = min(availableArea, pastRegArea); // this is just a start and will need to include the last year area
584  }
585  px->vReg.push_back(areaFirstProdClass*vHa/1000000.0); // TO.DO: check the 1000000. Should be ok, as area in ha vol in Mm^3
586  //if (pxId == 3550.0 && j==3){
587  // cout << "got the pixel" << endl;
588  //}
589  #ifdef QT_DEBUG
590  if (areaFirstProdClass < 0.0){
591  //msgOut(MSG_CRITICAL_ERROR,"Negative regeneration volumes in endogenous regeneration");
592  }
593  if ( (availableArea-pastRegArea) < -0.00000001 ){
594  // in very rare cases tp changes first in one direction and then in the other, so that the wrong past regeneration area
595  // is picked up.
596  //msgOut(MSG_CRITICAL_ERROR,"Upgrading from dc0 more area than the available one in endogenous regeneration");
597  }
598  #endif
599  } else {
600  double regionArea = REG->getValue("forArea_"+ft,OP_SUM);
601  double pxArea = px->getDoubleValue("forArea_"+ft, true); // 20121109 bug solved (add get zero for not data)
602  double regRegVolumes = gfd("vReg",r2,ft,"");
603  double newVReg = regionArea ? regRegVolumes*pxArea/regionArea : 0.0;
604  px->vReg.push_back(newVReg); // 20121108 BUG !!! solved // as now we have the area we could also use here entryVolHa
605  // only a share of the exogenous area goes up, the regeneration one doesn't yet reach tp0:
606  // areaFirstProdClass = (1.0 / px->tp.at(j).at(0) ) * px->area_l.at(j).at(0);
607  areaFirstProdClass = (1.0 / ((double) tp_u0) ) * px->initialDc0Area.at(j);
608  // in the exogenous period we are exogenously upgrading u0->u1 some areas but, as we do not have the regeneration
609  // are corresponding to that we have also to manually add it to u0
610  //px->area_l.at(j).at(0) += areaFirstProdClass;
611  //areaFirstProdClass = entryVolHa ? newVReg*1000000 /entryVolHa:0.0;
612  //if (pxId == 3550.0 && j==3){
613  // cout << "got the pixel" << endl;
614  //}
615 
616  #ifdef QT_DEBUG
617  if (areaFirstProdClass<0.0){
618  // msgOut(MSG_CRITICAL_ERROR,"Negative regeneration volumes in exogenous regeneration");
619  }
620  if (areaFirstProdClass > px->area_l.at(j).at(0)){
621  //msgOut(MSG_CRITICAL_ERROR,"Moving up area higher than available area in exogenous regeneration !");
622  }
623  #endif
624  // vReg and entryVolHa are NOT the same thing. vReg is the yearly regeneration volumes
625  // for the whole region. We can use them when we don't know the harvested area
626  // entryVolHa can lead to vReg calculation only when we know the regeneration area. So in the
627  // first years we use vReg and subsequently the endogenous one.
628  }
629 
630  //double harvestedArea = 0;
631 
632  if(useDeathTimber){
633  shareMortalityUsableTimber = gfd("shareMortalityUsableTimber",r2,ft,"");
634  } else {
635  shareMortalityUsableTimber = 0.0;
636  }
637 
638  for (uint u=0; u<dClasses.size(); u++){
639  string dc = dClasses[u];
640  double hr = 0;
641  double age = getAvgAgeByDc(px, j, u);
642 
643  //double pastYearVol_reg = u ? gfd("vol",r2,ft,dc,thisYear-1): 0;
644  double pastYearVol = px->vol_l.at(j).at(u);
645  vector <double> hV_byPrd;
646  vector <double> hr_byPrd;
647 
648  // harvesting rate & volumes...
649  // hr is by region.. no reasons in one pixel the RATE of harvesting will be different than in an other pixel
650  //hr(u,i,essence,lambda,t) = sum(p_pr, prov(u,essence,lambda,p_pr)*RPAR('st',i,p_pr,t)/In(i,p_pr,t));
651  //hV(u,i,essence,lambda,t) = hr(u,i,essence,lambda,t) * V(u,i,lambda,essence,t-1);
652  //hV_byPrd(u,i,essence,lambda,p_pr,t) = prov(u,essence,lambda,p_pr)*(RPAR('st',i,p_pr,t)/In(i,p_pr,t))*V(u,i,lambda,essence,t-1);
653  for(uint pp=0;pp<priProducts.size();pp++){
654  double st = gpd("st_fromHarv",r2,priProducts[pp]);
655  double in = gpd("in",r2,priProducts[pp]);
656  double hr_pr = in ? app(priProducts[pp],ft,dc)*st/in : 0.0;
657  hr_byPrd.push_back( hr_pr);
658  hr += hr_pr;
659  }
660 
661  // adjusting for overharvesting..
662  // 20160204: inserted to account for the fact that we let supply be marginally higher than the inventory ("in") in the market module, to let the solver converge
663  double origHr = hr;
664  hr = min(1.0,hr);
665  for(uint pp=0;pp<priProducts.size();pp++){
666  double hr_pr = origHr ? hr_byPrd[pp] * min(1.0,1.0/origHr) : 0.0;
667  hV_byPrd.push_back( hr_pr*pastYearVol*px->avalCoef.at(j));
668  }
669 
670  double hV = hr*pastYearVol*px->avalCoef.at(j);
671 
672 
673  hV_byDiam.push_back(hV);
674  hV_byDiamAndPrd.push_back(hV_byPrd);
675 
676  // post harvesting remained volumes computation..
677  // loop(u$(ord(u)=1),
678  // first diameter class: no harvesting and fixed regeneration..
679  // V(u,i,lambda,essence,t)=(1-1/(tp(u,i,lambda,essence))-mort(u,i,lambda,essence) )*V(u,i,lambda,essence,t-1)
680  // +Vregen(i,lambda,essence,t);
681  // );
682  // loop(u$(ord(u)>1),
683  // generic case..
684  // V(u,i,lambda,essence,t)=((1-1/(tp(u,i,lambda,essence))
685  // -mort(u,i,lambda,essence) - hr(u,i,essence,lambda,t))*V(u,i,lambda,essence,t-1)
686  // +(1/(tp(u-1,i,lambda,essence)))*beta(u,i,lambda,essence)*V(u-1,i,lambda,essence,t-1));
687  double vol;
688  double tp = px->tp.at(j).at(u); //gfd("tp",regId,ft,dc);
689  double mort = px->mort.at(j).at(u); //gfd("mortCoef",regId,ft,dc);
690  double vReg = px->vReg.at(j); //gfd("vReg",regId,ft,""); // Taking it from the memory database as we could be in a fixed vReg scenario and not having calculated it from above!
691  double beta = px->beta.at(j).at(u); //gfd("betaCoef",regId,ft,dc);
692  //double hv2fa = gfd("hv2fa",regId,ft,dc);
693  double vHa = px->vHa.at(j).at(u); //gfd("vHa",regId,ft,dc);
694  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
695 
696  double vMort = mort*pastYearVol;
697 
698  vMort_byDc.push_back(vMort);
699 
700  if(useDeathTimber){
701  iisskey key(thisYear,r2,ft,dc);
702 
703  MD->deathTimberInventory_incrOrAdd(key,vMort*shareMortalityUsableTimber);
704  }
705 
706  if(u==0){
707  vol = 0.0;
708  }else if(u==1){
709  vol = max(0.0,(1-1/tp-mort))*pastYearVol+vReg; //Antonello, "bug" fixed 20160203: In case of very strong mortality this quantity (that doesn't include harvesting) could be negative!
710  //double debug = vol;
711  #ifdef QT_DEBUG
712  if ((1-1/tp-mort)<0.0){
713  msgOut(MSG_DEBUG,"The sum of leaving trres and mortality would have lead to nevative volume if we didn't put a max. 1/tp: "+d2s(1/tp)+", mort: "+d2s(mort)+", total coeff: "+d2s((1-1/tp-mort))+" ");
714  }
715  #endif
716  } else {
717  // time of passage and volume of smaller diameter class
718  double inc = (u==dClasses.size()-1)?0:1./tp; // we exclude the possibility for trees in the last diameter class to move to an upper class
719  double tp_1 = px->tp.at(j).at(u-1); //gfd("tp",regId,ft,dClasses[u-1]);
720  double pastYearVol_1 = px->vol_l.at(j).at(u-1); //gfd("vol",regId,ft,dClasses[u-1],thisYear-1);
721  //vol = max(0.0,(1-inc-mort-hr)*pastYearVol+(1/tp_1)*beta*pastYearVol_1);
722  vol = max(0.0,(1-inc-mort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1); // I can't use any more hr as it is the harvesting rate over the available volumes, not the whole ones
723  #ifdef QT_DEBUG
724  if ((1-inc-mort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1 < 0){
725  double realVolumes = (1-inc-mort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1;
726  msgOut(MSG_DEBUG,"Negative real volumes ("+d2s(realVolumes)+"), possibly because of little bit larger bounds in the market module to avoid zeros. Volumes in the resource module set back to zero, so it should be ok.");
727  }
728  #endif
729  }
730  if(u != 0){ // this if is required to avoid a 0/0 and na error that then propage also in vSum()
731  double inc = (u==dClasses.size()-1)?0:1.0/tp; // we exclude the possibility for trees in the last diameter class to move to an upper class
732  double volumesMovingUp = inc*pastYearVol;
733  double pastArea = px->area_l.at(j).at(u);
734 
735  areasMovingUp.at(u) = inc*pastArea;
736 
737  if(oldVol2AreaMethod) {
738  hArea_byDc.push_back(finalHarvestFlag*1000000*hV/vHa); // volumes are in Mm^3, area in ha, vHa in m^3/ha
739  } else {
740  double finalHarvestedVolumes = finalHarvestFlag* hV;
741  double finalHarvestedRate = pastYearVol?finalHarvestedVolumes/pastYearVol:0.0; // Here we want the harvested rate over the whole volumes, not just the available ones, so we don't need to multiply to px->avalCoef.at(j)
742  #ifdef QT_DEBUG
743  if (finalHarvestedRate > 1.0){
744  msgOut(MSG_CRITICAL_ERROR,"Negative final harvested rate.");
745  }
746  #endif
747  hArea_byDc.push_back(finalHarvestedRate*pastArea); // volumes are in Mm^3, area in ha, vHa in m^3/ha
748  }
749  px->area.at(j).at(u) = max(0.0, px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u));
750  #ifdef QT_DEBUG
751  if ((px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u))< 0.0){
752  msgOut(MSG_DEBUG,"If not for a max, we would have had a negative area ("+d2s(px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u))+" ha).");
753  }
754  #endif
755  } else {
756  areasMovingUp.at(u) = areaFirstProdClass;
757  hArea_byDc.push_back(0.);
758  px->area.at(j).at(u) = px->area_l.at(j).at(u) - areasMovingUp.at(u) - hArea_byDc.at(u);
759  //if (pxId == 3550.0 && j==3){
760  // cout << "got the pixel" << endl;
761  //}
762  }
763  newVol_byDiam.push_back(vol);
764  #ifdef QT_DEBUG
765  if(px->area.at(j).at(u)< 0.0 || areasMovingUp.at(u) < 0.0 || hArea_byDc.at(u) < 0.0 ){
766  msgOut(MSG_CRITICAL_ERROR, "Negative values in runBiologicalModel");
767  }
768  #endif
769 
770  //double debug = hv2fa*hr*pastYearVol*100;
771  //cout << "regId|ft|dc| debug | freeArea: " << r2 << "|"<<ft<<"|"<<dc<<"| "<< debug << " | " << freeArea_byU << endl;
772 
773  //sfd(hr,"hr",regId,ft,dc);
774  //sfd(hV,"hV",regId,ft,dc);
775  //sfd(vol,"vol",regId,ft,dc);
776 
777  //sfd(freeArea_byU,"harvestedArea",regId,ft,dc,DATA_NOW,true);
778  double productivity = hArea_byDc.at(u) ? (1000000.0 * hV_byDiam.at(u))/(hArea_byDc.at(u)*age) : 0.0;
779  productivity_byDc.push_back(productivity);
780  } // end foreach diameter classes
781  px->hVol.push_back(hV_byDiam);
782  px->hVol_byPrd.push_back(hV_byDiamAndPrd);
783  px->hArea.push_back(hArea_byDc);
784  px->vol.push_back(newVol_byDiam);
785  px->vMort.push_back(vMort_byDc);
786  px->hProductivity.push_back(productivity_byDc);
787 
788 
789  #ifdef QT_DEBUG
790  for (uint u=1; u<dClasses.size(); u++){
791  double volMort = vMort_byDc[u];
792  double harvVol = hV_byDiam[u];
793  double vol_new = newVol_byDiam[u];
794  double vol_lagged = px->vol_l.at(j).at(u);
795  double gain = vol_new - (vol_lagged-harvVol-volMort);
796  if (volMort > vol_lagged){
797  msgOut(MSG_CRITICAL_ERROR,"mort vol > lagged volumes ?");
798  }
799  }
800  #endif
801  } // end of each forest type
802  } // end of each pixel
803 
804  #ifdef QT_DEBUG
805  // checking that in a region the total hVol is equal to the st for each products. 20150122 Test passed with the new availCoef
806  double sumSt = 0.0;
807  double sumHv = 0.0;
808  for(uint pp=0;pp<priProducts.size();pp++){
809  sumSt += gpd("st_fromHarv",r2,priProducts[pp]);
810  }
811  for (uint p=0;p<regPx.size();p++){
812  for(uint j=0;j<fTypes.size();j++){
813  for (uint u=0; u<dClasses.size(); u++){
814  for(uint pp=0;pp<priProducts.size();pp++){
815  // by ft, dc, pp
816  sumHv += regPx[p]->hVol_byPrd[j][u][pp];
817  }
818  }
819  }
820  }
821  if(abs(sumSt-sumHv) > 0.000001){
822  msgOut(MSG_DEBUG, "St and harvested volumes diverge in region "+REG->getRegSName()+". St: "+d2s(sumSt)+" hV: "+d2s(sumHv));
823  }
824  #endif
825  } // end of each region
826 
827 }
828 
829 
830 
831 void
833  msgOut(MSG_INFO, "Starting management module..");
834  vector<string> allFTypes = MTHREAD->MD->getForTypeIds(true);
835  map<string,double> hAreaByFTypeGroup = vectorToMap(allFTypes,0.0);
836  int thisYear = MTHREAD->SCD->getYear();
837  double ir = MTHREAD->MD->getDoubleSetting("ir");
838 
839  // Post optimisation management module..
840  for(uint i=0;i<regIds2.size();i++){
841  int r2 = regIds2[i];
842  int regId = r2;
843  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
844  regPx = REG->getMyPixels();
845 
846  string flagHartman = MTHREAD->MD->getStringSetting("flagHartman",DATA_NOW,r2);
847  double pickling = MTHREAD->MD->getDoubleSetting("pickling"); // pickling factor used to factor in sequestration ex-situ and substitution effects
848 
849  //double exogenousReference = MTHREAD->MD->getBoolSetting("exogenousReference"); // new setting to distinguish between exogenous and endogenous carbon reference levels
850 
851 
852 
853 
854  // Caching futureCarbonPrices..
855  double carbonPrice_NOW = gpd("carbonPrice",r2,"",thisYear+1); // first carbon payments take place the year after replanting
856  vector<double> futureCarbonPrices;
857  if(flagHartman!="NONE"){
858  for (int y=0; y<1000; y++) {
859  futureCarbonPrices.push_back(gpd("carbonPrice",r2,"",thisYear + y));
860  }
861  }
862 
863  vector<double> polBal_fiSub (fTypes.size(),0.0); // Expenses for forest investment subsides by region and destination ft €
864 
865  // Dealing with area change..
866  double fArea_reg = REG->getArea();
867  double fArea_diff = 0.0;
868  double fArea_reldiff = 0.0;
869  if(forestAreaChangeMethod=="relative"){
870  fArea_reldiff = gfd("forestChangeAreaIncrementsRel",r2,"","",DATA_NOW);
871  fArea_diff = fArea_reg * fArea_reldiff;
872  } else if (forestAreaChangeMethod=="absolute"){
873  fArea_diff = gfd("forestChangeAreaIncrementsHa",r2,"","",DATA_NOW);
874  fArea_reldiff = fArea_diff / fArea_reg;
875  }
876 
877  double regHArea = 0.0; // for the warning
878 
879  // Management rate
880  // 20170123: if mr is defined at regional level in the forData table, good, otherwise we look at settings table.
881  // If it isn't there either, we raise a critical error.
882  // 20170302: removed the (false positive) error message raised if we put mr in setting table
883  // 20180626: mr is now, like all the other settings, powered with a regional dimension, so this looking in the
884  // forest data is no longer needed, but we keep it as some old projects have mr in the forData sheet.
885  double mr;
886  int origErrorLevel = MD->getErrorLevel();
887  MTHREAD->MD->setTempBool(true); // this should not be needed as it is set positive in getForData() map soubroutine..
889  mr = MD->getForData("mr",regId,"","");
890  if(!MTHREAD->MD->getTempBool()) { // mr not found in the forest data
892  mr = MD->getDoubleSetting("mr",DATA_NOW,regId);
893  }
894  //try{
895  // mr = MD->getForData("mr",regId,"","");
896  //} catch (...){
897  // mr = MD->getDoubleSetting("mr");
898  //}
899  MD->setErrorLevel(origErrorLevel);
900 
901  // Caching carbon reference levels by ftypes
902  vector<vector<double>> futureCarbonRef;
903  vector<vector<double>> futureExtraBiomass_ratio;
904 
905  if(flagHartman!="NONE"){
906  for(uint j=0;j<fTypes.size();j++){
907  string ft = fTypes[j];
908  vector<double> futureCarbonRef_ft;
909  vector<double>futureExtraBiomass_ratio_ft;
910  for (int y=0; y<1000; y++) {
911  double carbonRef = gfd("carbonRef",r2,ft,"", thisYear + y);
912  futureCarbonRef_ft.push_back(carbonRef);
913  double extraBiomass_ratio = gfd("extraBiomass_ratio",regId,ft,"",DATA_NOW);
914  futureExtraBiomass_ratio_ft.push_back(extraBiomass_ratio);
915  }
916  futureCarbonRef.push_back(futureCarbonRef_ft);
917  futureExtraBiomass_ratio.push_back(futureExtraBiomass_ratio_ft);
918  }
919  }
920 
921 
922  for (uint p=0;p<regPx.size();p++){
923  Pixel* px = regPx[p];
924  px->expectedReturns.clear();
925  px->expectedReturnsNotCorrByRa.clear(); // BUG discovered 20160825
926  //px->expectedReturnsCarbon.clear(); TODO
927  px->optDc.clear();
928  px->optFtChosen.clear();
929  px->optDcChosen.clear();
930  px->expectedAnnualIncome_carbon.clear();
931  px->expectedAnnualIncome_timber.clear();
932  resetMapValues(hAreaByFTypeGroup,0.0);
933  double totalHarvestedAreaOrig = vSum(px->hArea); // still need to remove the forest decrease areas..
934  double totalHarvestedArea = 0.0; // account for (eventual) forest decreases area)
935  vector<double> thisYearRegAreas(fTypes.size(),0.0); // initialize a vector of fTypes.size() zeros.
936  vector<double> expectedReturns(fTypes.size(),0.0); // uncorrected expected returns (without considering transaction costs). These are in form of eai
937  vector<double> expectedReturnsCarbon(fTypes.size(),0.0); // expected return of the carbon only
938  vector<int> rotation_dc(fTypes.size(),0.0); //vector to store optimal dc for each ftype (to be reused when flagHartman == "BAU")
939  vector< vector<double> > deltaAreas(fTypes.size()+1, vector<double>(fTypes.size()+1,0)); // matrix of regeneration areas ft_from/ft_to
940 
941 
942 
943  double fArea_px = vSum(px->area);
944  double fArea_diff_px = fArea_px * fArea_diff/ fArea_reg;
945 
946  double fArea_incr = max(0.0,fArea_diff_px);
947  double fArea_incr_rel = totalHarvestedAreaOrig?fArea_incr/totalHarvestedAreaOrig:0.0;
948  double fArea_decr = - min(0.0,fArea_diff_px);
949  double fArea_decr_rel = totalHarvestedAreaOrig?min(1.0,fArea_decr/totalHarvestedAreaOrig):0.0; // we can "remove" at maximum what has been harvested
950  regHArea += totalHarvestedAreaOrig;
951  totalHarvestedArea = totalHarvestedAreaOrig *(1-fArea_decr_rel);
952 
953 
954  // A - Computing the harvestingArea by parent ft group (for the allocation according to the prob of presence):
955  for(uint j=0;j<fTypes.size();j++){
956  string ft = fTypes[j];
957  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
958  double hAreaThisFt=vSum(px->hArea.at(j))*(1-fArea_decr_rel);
959  incrMapValue(hAreaByFTypeGroup,parentFt,hAreaThisFt); // increment the parent ft of the harvested area, neeed for assigning the frequences (prob. of presence)
960  }
961 
962  // B - Computing the uncorrected expected returns (without considering transaction costs)
963  // 20120910, Antonello: changed.. calculating the expected returns also for fixed and fromHrLevel regeneration (then not used but gives indication)
964  // calculating the expected returns..
965  // loop ( (u,i,essence,lambda,p_pr),
966  // if (sum(u2, hV(u2,i,essence,lambda,t))= 0,
967  // expRetPondCoef(u,i,essence,lambda,p_pr) = 0;
968  // else
969  // expRetPondCoef(u,i,essence,lambda,p_pr) = hV_byPrd(u,i,essence,lambda,p_pr,t)/ sum(u2, hV(u2,i,essence,lambda,t));
970  // );
971  // );
972  // expReturns(i,essence,lambda) = sum( (u,p_pr),
973  // RPAR("pl",i,p_pr,t)*hv2fa(i,essence,lambda,u)*(1/df_byFT(u,i,lambda,essence))* // df_byFT(u,i,lambda,essence)
974  // expRetPondCoef(u,i,essence,lambda,p_pr)
975  // );
976 
977  for(uint j=0;j<fTypes.size();j++){
978  string ft = fTypes[j];
979 
980  // Added for carbon project
981  double co2content_inventory = gfd("co2content_inventory",r2,ft,"",DATA_NOW)/1000; // kilos to tons
982  // End of added
983 
984 
985  double expReturns = std::numeric_limits<double>::lowest(); // these are reset at the beginning of each forest type, and need to be declared before the next loop
986  double expReturns_carbon = std::numeric_limits<double>::lowest();
987  double expReturns_timber = std::numeric_limits<double>::lowest();
988  int optDc = 0; // "optimal diameter class", the one on which the expected returns are computed
989  for (uint u=0; u<dClasses.size(); u++){
990  string dc = dClasses[u];
991  double vHa = px->vHa_exp.at(j).at(u);
992  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
993  double cumTp_u = px->cumTp_exp.at(j).at(u);
994  for (uint pp=0;pp<priProducts.size();pp++){
995  double pl = gpd("pl",regId,priProducts[pp]); // note that this is the OBSERVED price. If we call it at current year+cumTp_u we would have the expected price. But we would first have to compute it, as pw is weigthed price world-local and we don't have local price for the future. DONE 20141202 ;-)
996  double worldCurPrice = gpd("pl",WL2,priProducts[pp]);
997  double worldFutPrice = gpd("pl",WL2,priProducts[pp],thisYear+cumTp_u);
998  double sl = gpd("sl",regId,priProducts[pp]);
999  double sa = gpd("sa",regId,priProducts[pp]);
1000  double pw_exp = computeExpectedPrice(pl, worldCurPrice, worldFutPrice, sl, sa, px->expTypePrices); //20141030: added the expected price!
1001  double raw_amount = finalHarvestFlag*pw_exp*vHa*app(priProducts[pp],ft,dc); // B.U.G. 20121126, it was missing app(pp,ft,dc) !!
1002  double anualised_amount_timber = MD->calculateAnnualisedEquivalent(raw_amount,cumTp_u, ir); // expected revenues from timber
1003  double anualised_amount = anualised_amount_timber; // total expected revenues (for now there is no carbon payment yet)
1004 
1005  // If carbon payments are included and the reference is an exogenous fixed value (e.g. year of reference), we compute revenues from carbon after retrieving reference from input files
1006  // Added for carbon project-------------------------------------------------------------------------
1007  double raw_amount_carbon = 0; // declared and initialized before IF statement and FOR loop
1008  if (flagHartman =="EXO") {
1009  double carbonPrice_y = carbonPrice_NOW;
1010  for (int y=0; y<cumTp_u; y++) {
1011  //double carbonPrice_y = futureCarbonPrices[y];
1012  double carbonRef = futureCarbonRef[j][y];
1013  double expectedVHa_y = getVHaByYear(px, j, y, futureExtraBiomass_ratio[j][y], regId);
1014  double carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - carbonRef);
1015  raw_amount_carbon += carbonPayment_y*pow(1+ir,cumTp_u - y); // raw amount is calculated iteratively
1016  }
1017  //Now we add the final payment corresponding to carbon sequestered in long-lived products
1018  //double CarbonPrice_cumTp = futureCarbonPrices[floor(cumTp_u)]; INSTEAD WE USE CURRENT CARBON PRICE
1019  raw_amount_carbon += pickling * carbonPrice_y * finalHarvestFlag * vHa * app(priProducts[pp],ft,dc); // we add the final payment to raw amount for the final year, outside the FOR loop
1020  }
1021  // And we finally annualise everything
1022  double anualised_amount_carbon = MD->calculateAnnualisedEquivalent(raw_amount_carbon,cumTp_u, ir);
1023  anualised_amount += anualised_amount_carbon; // anualised_amount already had the timber revenues in it, now we add carbon
1024 
1025  // End of added-----------------------------------------------------------------------------------
1026 
1027 
1028 
1029  if (anualised_amount>expReturns) {
1030  expReturns=anualised_amount;
1031  optDc = u;// this is the index of the optimal dc, not the dc itself
1032  expReturns_carbon = anualised_amount_carbon;
1033  expReturns_timber = anualised_amount_timber;
1034  }
1035  } // end for each product
1036  } // end for each diameter class
1037 
1038  //results are pushed back to pixel only if they are final
1039  if (flagHartman=="NONE" || flagHartman=="EXO") {
1040  px->expectedAnnualIncome_timber.push_back(expReturns_timber);
1041  px->expectedAnnualIncome_carbon.push_back(expReturns_carbon);
1042  px->expectedReturnsNotCorrByRa.push_back(expReturns);
1043  px->optDc.push_back(optDc);
1044  }
1045 
1046  //calculation of expected returns with risk aversion
1047  if(MD->getBoolSetting("heterogeneousRiskAversion",DATA_NOW)){
1048  double ra = px->getDoubleValue("ra");
1049  double cumMort = 1-px->cumAlive_exp.at(j).at(optDc);
1050  //cout << px->getID() << "\t" << ft << "\t\t" << "optDc" << optDc << "\t" << cumMort << endl;
1051  double origExpReturns = expReturns;
1052  expReturns = origExpReturns * (1.0 - ra*cumMort);
1053  }
1054 
1055  // results are pushed back to pixel only if they are final
1056  if (flagHartman=="NONE" || flagHartman=="EXO") {
1057  px->expectedReturns.push_back(expReturns); // if there is no risk version, then expectedReturns and expectedReturnsNotCorrByRa will have the same value
1058  }
1059 
1060  expectedReturns.at(j) = expReturns;
1061  rotation_dc.at(j) = optDc; // here we store the index of optimal dc for the forest type considered
1062 
1063  } // end foreach forest type
1064 
1065 
1066  // Looping again by forest types, here by ft in input (those harvested)
1067  // Added for second carbon project-------------------------------------------------------------------------------------------------------------------------------------
1068  // If carbon payments are included and the reference is BAU (i.e., endogenous), we need a new loop-------------------------------------------------------------------
1069  // In this case, the reference is what happens without policy, i.e. the result of the previous loop when (flagHartman =="REFERENCE") is FALSE
1070  // So we just redo another loop with carbon payments using the previous result as reference
1071 
1072  if (flagHartman =="ENDO") { // if carbon payments take place and reference is endogenous (is BAU)
1073  // STEP 1 - retrieve optimal regeneration choice without carbon payments
1074  int ft_opt_index = getMaxPos(expectedReturns); // get ft with max expected returns
1075  string ft_opt = fTypes[ft_opt_index]; // get its name
1076  int dc_opt_index = rotation_dc[ft_opt_index]; // get dc for forest type with max expected return
1077  string dc_opt = dClasses[dc_opt_index]; // get its name
1078  double cumTP_opt = px->cumTp_exp.at(ft_opt_index).at(dc_opt_index);
1079  double co2content_inventory_opt = gfd("co2content_inventory",r2,ft_opt,"",DATA_NOW)/1000; // kilos to tons
1080  // STEP 2 - calculate expected carbon contents on the optimal choice without carbon payments, to use as reference
1081  vector <double> FutureCarbonContent_opt;
1082  for (int y=0; y<cumTP_opt; y++) {
1083  double expectedVHa_opt_y = getVHaByYear(px, ft_opt_index, y, futureExtraBiomass_ratio[ft_opt_index][y], regId);
1084  FutureCarbonContent_opt.push_back(co2content_inventory_opt*expectedVHa_opt_y);
1085  }
1086  int cumTP_opt_floor = FutureCarbonContent_opt.size(); // need to get an integer value to compare rotation lengths
1087 
1088  // STEP 3 - new loop to find optimal regeneration choice WITH carbon payments, using the choice WITHOUT carbon payments as a reference
1089 
1090  for(uint j=0;j<fTypes.size();j++){
1091  string ft = fTypes[j];
1092  double co2content_inventory = gfd("co2content_inventory",r2,ft,"",DATA_NOW)/1000; // kilos to tons
1093 
1094  double expReturns_hartman = std::numeric_limits<double>::lowest();
1095  double expReturns_timber_hartman = std::numeric_limits<double>::lowest();
1096  double expReturns_carbon_hartman=std::numeric_limits<double>::lowest();
1097  int optDc_hartman = 0; // "optimal diameter class", the one on which the expected returns are computed
1098  for (uint u=0; u<dClasses.size(); u++){
1099  string dc = dClasses[u];
1100  double vHa = px->vHa_exp.at(j).at(u);
1101  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
1102  double cumTp_u = px->cumTp_exp.at(j).at(u);
1103  // Here we calculate expected revenues from timber
1104  for (uint pp=0;pp<priProducts.size();pp++){
1105  double pl = gpd("pl",regId,priProducts[pp]); // note that this is the OBSERVED price. If we call it at current year+cumTp_u we would have the expected price. But we would first have to compute it, as pw is weigthed price world-local and we don't have local price for the future. DONE 20141202 ;-)
1106  double worldCurPrice = gpd("pl",WL2,priProducts[pp]);
1107  double worldFutPrice = gpd("pl",WL2,priProducts[pp],thisYear+cumTp_u);
1108  double sl = gpd("sl",regId,priProducts[pp]);
1109  double sa = gpd("sa",regId,priProducts[pp]);
1110  double pw_exp = computeExpectedPrice(pl, worldCurPrice, worldFutPrice, sl, sa, px->expTypePrices); //20141030: added the expected price!
1111  double raw_amount = finalHarvestFlag*pw_exp*vHa*app(priProducts[pp],ft,dc); // B.U.G. 20121126, it was missing app(pp,ft,dc) !!
1112  double anualised_amount_timber = MD->calculateAnnualisedEquivalent(raw_amount,cumTp_u, ir);
1113  double anualised_amount = anualised_amount_timber;
1114 
1115  // Now we calculate expected revenues from carbon
1116  double raw_amount_carbon=0;
1117  double carbonPrice_y = carbonPrice_NOW; //carbon price is by default current price, ie it is assumed to be constant from owner's perspective
1118  for (int y=0; y<cumTp_u; y++) {
1119  // calculate yearly payment for each year of the rotation. NOTE(review): the modulus test below, "cumTP_opt_floor%y", looks inverted relative to its own comment — to test whether year y is a multiple of the optimal rotation length it should presumably be "y%cumTP_opt_floor". Also, "FutureCarbonContent_opt[cumTP_opt_floor]" in the else-branch indexes one past the last element (the vector has cumTP_opt_floor elements, valid indices 0..cumTP_opt_floor-1), which is undefined behaviour — TODO confirm and fix.
1120  double expectedVHa_y = getVHaByYear(px, j, y, futureExtraBiomass_ratio[j][y], regId);
1121  double carbonPayment_y;
1122  if (y==0) { // we use euclidian division, so an exception must be made when denominator is equal to 0
1123  carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[y]);
1124  } else {
1125  int factor = y/cumTP_opt_floor; // euclidian division of the two lengths we compare
1126  if (cumTP_opt_floor%y >0) { // if the year is NOT a multiple of optimal rotation time, we need to take the year-th value of vector modulus how many rotations we have passed
1127  carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[y - factor*cumTP_opt_floor]);
1128  } else { // if the year is EXACTLY a multiple of optimal rotation time, we need to take the last value of vector, not the first one
1129  carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[cumTP_opt_floor]);
1130  }
1131  }
1132 
1133  raw_amount_carbon += carbonPayment_y*pow(1+ir,cumTp_u - y); // all payments put at end of rotation
1134  }
1135 
1136  //Now we add the final payment corresponding to carbon sequestered in long-lived products
1137  //double CarbonPrice_cumTp = futureCarbonPrices[floor(cumTp_u)]; INSTEAD WE USE CURRENT CARBON PRICE
1138  raw_amount_carbon += pickling * carbonPrice_y * finalHarvestFlag * vHa * app(priProducts[pp],ft,dc);
1139 
1140  // And we annualise carbon revenues and add them to annualised timber revenues
1141  double anualised_amount_carbon = MD->calculateAnnualisedEquivalent(raw_amount_carbon,cumTp_u, ir);
1142  anualised_amount += anualised_amount_carbon;
1143  //px->expectedReturnsCarbon = anualised_amount_carbon; TODO
1144 
1145  // If the new expected revenues is higher than previously, we store the maximum value and corresponding dc.
1146  if (anualised_amount>expReturns_hartman) {
1147  expReturns_hartman=anualised_amount;
1148  expReturns_carbon_hartman = anualised_amount_carbon;
1149  expReturns_timber_hartman = anualised_amount_timber;
1150  optDc_hartman = u;
1151  }
1152  } // end foreach primary product
1153  } // end foreach DC
1154 
1155  // Here we send results to pixel because they are final
1156  px->expectedAnnualIncome_timber.push_back(expReturns_timber_hartman);
1157  px->expectedAnnualIncome_carbon.push_back(expReturns_carbon_hartman);
1158  px->expectedReturnsNotCorrByRa.push_back(expReturns_hartman);
1159  px->optDc.push_back(optDc_hartman);
1160 
1161  // And with risk aversion
1162  if(MD->getBoolSetting("heterogeneousRiskAversion",DATA_NOW)){
1163  double ra = px->getDoubleValue("ra");
1164  double cumMort = 1-px->cumAlive_exp.at(j).at(optDc_hartman);
1165  //cout << px->getID() << "\t" << ft << "\t\t" << "optDc" << optDc << "\t" << cumMort << endl;
1166  double origExpReturns_hartman = expReturns_hartman;
1167  expReturns_hartman = origExpReturns_hartman * (1.0 - ra*cumMort);
1168  }
1169 
1170  px->expectedReturns.push_back(expReturns_hartman);
1171 
1172  // Now we replace results from the previous loop with the new ones
1173 
1174  expectedReturns.at(j) = expReturns_hartman;
1175  rotation_dc.at(j) = optDc_hartman;
1176 
1177 
1178  } // end foreach forest type
1179 
1180  } // end IF Hartman reference is BAU (no policy)----------------------------------------------------------------
1181 
1182  // End of added second carbon project---------------------------------------------------------------------------
1183 
1184  for(uint j=0;j<fTypes.size();j++){
1185  string ft = fTypes[j];
1186  forType* thisFt = MTHREAD->MD->getForType(ft);
1187 
1188  double harvestedAreaForThisFT = vSum(px->hArea.at(j))*(1-fArea_decr_rel); // gfd("harvestedArea",regId,ft,DIAM_ALL);
1189  deltaAreas[j][fTypes.size()] += vSum(px->hArea.at(j))*(fArea_decr_rel);
1190  vector<double> corrExpectedReturns(fTypes.size(),0.0); // corrected expected returns (considering transaction costs). These are in form of NPV
1191  vector<double> polBal_fiSub_unitvalue(fTypes.size(),0.0); // subside (tax) per ha per destination ft €/ha
1192 
1193  // C - Computing the corrected expected returns including transaction costs
1194 
1195  for(uint j2=0;j2<fTypes.size();j2++){
1196  string ft2 = fTypes[j2];
1197  double invTransCost = (gfd("invTransCost",regId,ft,ft2,DATA_NOW) * gfd("pol_fiSub",regId,ft,ft2,DATA_NOW) ) ;
1198  polBal_fiSub_unitvalue[j2] = gfd("invTransCost",regId,ft,ft2,DATA_NOW) * (gfd("pol_fiSub",regId,ft,ft2,DATA_NOW) - 1) ;
1199  corrExpectedReturns[j2] = (expectedReturns[j2]/ir)-invTransCost; // changed 20150718: npv = eai/ir + tr. cost // HUGE BUG 20151202: transaction costs should be REDUCED, not added to the npv...
1200  }
1201 
1202  //int highestReturnFtIndex = getMaxPos(corrExpectedReturns);
1203 
1204  // D - Assigning the Managed area
1205  // calculating freeArea at the end of the year and choosing the new regeneration area..
1206  //freeArea(i,essence,lambda) = sum(u, hv2fa(i,essence,lambda,u)*hr(u,i,essence,lambda,t)*V(u,i,lambda,essence,t-1)*100);
1207  //if(scen("endVreg") ,
1208  // regArea(i,essence,lambda,t) = freeArea(i,essence, lambda); // here we could introduce in/out area from other land usages
1209  //else
1210  // loop (i,
1211  // loop( (essence,lambda),
1212  // if ( expReturns(i,essence,lambda) = smax( (essence2,lambda2),expReturns(i,essence2,lambda2) ),
1213  // regArea (i,essence,lambda,t) = sum( (essence2, lambda2), freeArea(i,essence2, lambda2) ) * mr;
1214  // );
1215  // );
1216  // regArea(i,essence,lambda,t) = freeArea(i,essence, lambda)*(1-mr); // here we could introduce in/out area from other land usages
1217  // );
1218  //if (j==highestReturnFtIndex){
1219  // thisYearRegAreas[j] += totalHarvestedArea*mr;
1220  //}
1221  // If I Implement this I'll have a minimal diff in total area.. why ?????
1222 
1223  int optFtChosen = getMaxPos(corrExpectedReturns);
1224  px->optFtChosen.push_back(optFtChosen);
1225  px->optDcChosen.push_back(px->optDc.at(optFtChosen));
1226 
1227  thisYearRegAreas[getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr;
1228  thisYearRegAreas[getMaxPos(expectedReturns)] += fArea_incr*mr/((double)fTypes.size()); // mr quota of new forest area assigned to highest expected returns ft (not considering transaction costs). Done for each forest types
1229 
1230  deltaAreas[j][getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr;
1231  deltaAreas[fTypes.size()][getMaxPos(expectedReturns)] += fArea_incr*mr/((double)fTypes.size());
1232 
1233  polBal_fiSub[getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr*polBal_fiSub_unitvalue[getMaxPos(corrExpectedReturns)];
1234 
1235  // E - Assigning unmanaged area
1236  //for(uint j2=0;j2<fTypes.size();j2++){
1237  if(natRegAllocation=="pp"){ // according to prob presence
1238  //string ft2 = fTypes[j2];
1239  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1240  double freq = rescaleFrequencies ? gfd("freq_norm",regId,parentFt,""):gfd("freq",regId,parentFt,""); // "probability of presence" for unmanaged forest, added 20140318
1241  double hAreaThisFtGroup = findMap(hAreaByFTypeGroup,parentFt);
1242  double hRatio = 1.0;
1243  if(hAreaThisFtGroup>0){
1244  //double harvestedAreaForThisFT2 = vSum(px->hArea.at(j2));
1245  hRatio = harvestedAreaForThisFT/hAreaThisFtGroup;
1246  } else {
1247  int nFtChilds = MTHREAD->MD->getNForTypesChilds(parentFt);
1248  hRatio = 1.0/nFtChilds;
1249  }
1250  thisYearRegAreas[j] += totalHarvestedArea*(1-mr)*freq*hRatio;
1251  thisYearRegAreas[j] += fArea_incr*(1-mr)*freq*hRatio; // non-managed quota of new forest area assigning proportionally on pp at sp group level
1252  //thisYearRegAreas[j2] += harvestedAreaForThisFT*(1-mr)*freq*hRatio;
1253  // Here is the problem with building a transaction matrix ft_from/ft_to: we don't explicitly assign from/to for unmanaged areas !
1254  for(uint j2=0;j2<fTypes.size();j2++){
1255  deltaAreas[j2][j] += vSum(px->hArea.at(j2))*(1-fArea_decr_rel)*(1-mr)*freq*hRatio;
1256  }
1257  deltaAreas[fTypes.size()][j] += fArea_incr*(1-mr)*freq*hRatio;
1258 
1259 
1260 
1261  } else { // prob presence not used..
1262 
1263  // Accounting for mortality arising from pathogens. Assigning the area to siblings according to area..
1264  // TODO: chek that mortRatePath doesn't involve adding forest area to deltaAreas[j][""]
1265 
1266  double mortRatePath = px->getPathMortality(ft, "0");
1267  if(mortRatePath > 0){
1268 
1269  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1270  vector <string> siblings = MTHREAD->MD->getForTypeChilds(parentFt);
1271  vector <double> siblingAreas;
1272  for(uint j2=0;j2<siblings.size();j2++){
1273  if(siblings[j2]==ft){
1274  siblingAreas.push_back(0.0);
1275  } else {
1276  //string debug_sibling_ft = siblings[j2];
1277  //int debug_positin = getPos(debug_sibling_ft,fTypes);
1278  double thisSiblingArea = vSum(px->area.at(getPos(siblings[j2],fTypes)));
1279  siblingAreas.push_back(thisSiblingArea);
1280  }
1281  }
1282  double areaAllSiblings = vSum(siblingAreas);
1283  thisYearRegAreas[j] += harvestedAreaForThisFT*(1-mr)*(1-mortRatePath);
1284  deltaAreas[j][j] += harvestedAreaForThisFT*(1-mr)*(1-mortRatePath);
1285 
1286  if(areaAllSiblings>0.0){ // area of siblings is >0: we attribute the area from the pathogen induced mortality to the siblings proportionally to area..
1287  for(uint j2=0;j2<siblings.size();j2++){
1288 // int debug1 = getPos(siblings[j2],fTypes);
1289 // double debug2= harvestedAreaForThisFT;
1290 // double debug3 = 1.0-mr;
1291 // double debug4 = mortRatePath;
1292 // double debug5 = siblingAreas[j2];
1293 // double debug6 = areaAllSiblings;
1294 // double debug7 = harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1295  thisYearRegAreas[getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1296  deltaAreas[j][getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1297  }
1298  } else if (siblings.size()>1) { // area of all siblings is 0, we just give them the mortality area in equal parts..
1299  for(uint j2=0;j2<siblings.size();j2++){
1300  if (siblings[j2] != ft){
1301  thisYearRegAreas[getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)* 1.0 / (( (float) siblings.size())-1.0);
1302  deltaAreas[j][getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)* 1.0 / (( (float) siblings.size())-1.0);
1303  }
1304  }
1305  }
1306  } else { // mortRatePath == 0
1307  thisYearRegAreas[j] += harvestedAreaForThisFT*(1.0-mr);
1308  deltaAreas[j][j] += harvestedAreaForThisFT*(1.0-mr);
1309  }
1310 
1311  // Allocating non-managed quota of new forest area to ft proportionally to the current area share by ft
1312  double newAreaThisFt = vSum(px->area) ? fArea_incr*(1-mr)*vSum(px->area.at(j))/vSum(px->area): 0.0;
1313  thisYearRegAreas[j] += newAreaThisFt;
1314  deltaAreas[fTypes.size()][j] += newAreaThisFt;
1315 
1316  if(! (thisYearRegAreas[j] >= 0.0) ){
1317  msgOut(MSG_ERROR,"thisYearRegAreas[j] is not non negative (j: "+i2s(j)+", thisYearRegAreas[j]: "+i2s( thisYearRegAreas[j])+").");
1318  }
1319  //thisYearRegAreas[j2] += harvestedAreaForThisFT*(1-mr);
1320  }
1321  //}
1322  } // end for each forest type
1323 
1324  // adding regeneration area to the fist (00) diameter class
1325  for(uint j=0;j<fTypes.size();j++){
1326  px->area.at(j).at(0) += thisYearRegAreas.at(j);
1327  }
1328 
1329  #ifdef QT_DEBUG
1330  double totalRegArea = vSum(thisYearRegAreas);
1331  if (! (totalRegArea==0.0 && totalHarvestedArea==0.0)){
1332  double ratio = totalRegArea / totalHarvestedArea ;
1333  if(rescaleFrequencies && (ratio < 0.99999999999 || ratio > 1.00000000001) && forestAreaChangeMethod == "fixed") {
1334  msgOut(MSG_CRITICAL_ERROR, "Sum of regeneration areas not equal to sum of harvested area in runManagementModel()!");
1335  }
1336  }
1337  #endif
1338 
1339  px->regArea.insert(pair <int, vector<double> > (MTHREAD->SCD->getYear(), thisYearRegAreas));
1340  px->deltaArea = deltaAreas;
1341 
1342  } // end of each pixel
1343  if (-fArea_diff > regHArea){
1344  msgOut(MSG_WARNING,"In region "+ i2s(regId) + " the exogenous area decrement ("+ d2s(-fArea_diff) +" ha) is higger than the harvesting ("+ d2s(regHArea) +" ha). Ratio forced to 1.");
1345  }
1346 
1347  for(uint j=0;j<fTypes.size();j++){
1348  double debug = polBal_fiSub[j]/1000000;
1349  sfd((polBal_fiSub[j]/1000000.0),"polBal_fiSub",regId,fTypes[j],"",DATA_NOW,true); //M€
1350 
1351  }
1352  } // end of each region
1353 }
1354 
1355 void
1357  msgOut(MSG_INFO, "Cashing initial model settings..");
1358  MD = MTHREAD->MD;
1359  firstYear = MD->getIntSetting("initialYear");
1360  secondYear = firstYear+1;
1361  thirdYear = firstYear+2;
1362  WL2 = MD->getIntSetting("worldCodeLev2");
1363  regIds2 = MD->getRegionIds(2);
1364  priProducts = MD->getStringVectorSetting("priProducts");
1365  secProducts = MD->getStringVectorSetting("secProducts");
1367  allProducts.insert( allProducts.end(), secProducts.begin(), secProducts.end() );
1368  dClasses = MD->getStringVectorSetting("dClasses");
1369  pDClasses; // production diameter classes: exclude the fist diameter class below 15 cm
1370  pDClasses.insert(pDClasses.end(), dClasses.begin()+1, dClasses.end() );
1371  fTypes= MD->getForTypeIds();
1372  l2r = MD->getRegionIds();
1373 }
1374 
1375 void
1377  // Settings needs explicitly DATA_NOW to have a temporal dymension..
1378  msgOut(MSG_INFO, "Cashing model settings that may change every year..");
1379  regType = MTHREAD->MD->getStringSetting("regType",DATA_NOW); // how the regeneration should be computed (exogenous, from hr, from allocation choises)
1380  natRegAllocation = MTHREAD->MD->getStringSetting("natRegAllocation",DATA_NOW); // how to allocate natural regeneration
1381  rescaleFrequencies = MD->getBoolSetting("rescaleFrequencies",DATA_NOW);
1382  oldVol2AreaMethod = MD->getBoolSetting("oldVol2AreaMethod",DATA_NOW);
1383  //mr = MD->getDoubleSetting("mr");
1384  forestAreaChangeMethod = MTHREAD->MD->getStringSetting("forestAreaChangeMethod",DATA_NOW);
1385  ir = MD->getDoubleSetting("ir",DATA_NOW);
1386 }
1387 
1388 void
1390  msgOut(MSG_INFO, "Starting initializing pixel-level values");
1391 
1392  // pxVol = regVol * pxArea/regForArea
1393  // this function can be done only at the beginning of the model, as it assume that the distribution of volumes by diameter class in the pixels within a certain region is homogeneous, but as the model progress along the time dimension this is no longer true.
1394  if(!MD->getBoolSetting("usePixelData")) return;
1395  for(uint i=0;i<regIds2.size();i++){
1396  ModelRegion* reg = MD->getRegion(regIds2[i]);
1397  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
1398  for (uint j=0;j<rpx.size();j++){
1399  //int debugPx = rpx[j]->getID();
1400  //int debug2 = debugPx;
1401  rpx[j]->vol.clear(); // not actually necessary
1402  for(uint y=0;y<fTypes.size();y++){
1403  vector <double> vol_byu;
1404  double regForArea = reg->getValue("forArea_"+fTypes[y]);
1405  for (uint z=0;z<dClasses.size();z++){
1406  double regVol;
1407  regVol = z ? gfd("vol",regIds2[i],fTypes[y],dClasses[z],firstYear) : 0 ; // if z=0-> regVol= gfd(), otherwise regVol=0;
1408  double pxArea = rpx[j]->getDoubleValue("forArea_"+fTypes[y], true); // bug solved 20121109. get zero for not data
1409  if (pxArea<0.0){
1410  msgOut(MSG_CRITICAL_ERROR,"Error in initializePixelVolumes, negative pxArea!");
1411  }
1412  double pxVol = regForArea ? regVol * pxArea/regForArea: 0; // if we introduce new forest types without initial area we must avoid a 0/0 division
1413  //rpx[j]->changeValue(pxVol,"vol",fTypes[y],dClasses[z],firstYear);
1414  vol_byu.push_back(pxVol);
1415  }
1416  rpx[j]->vol.push_back(vol_byu);
1417  }
1418  }
1419  }
1421 }
1422 
1423 /**
 * @brief ModelCoreSpatial::assignSpMultiplierPropToVols assigns the spatial multiplier (used in the time of return) no longer based on a Normal distribution but on the volumes present in the pixel: the more volume, the fitter the pixel is for the ft
 *
 * This function applies to the pixel a multiplier of the time of passage that is inversely proportional to the volumes of that forest type present in the pixel.
 * The idea is that the spots where we observe more of a given forest type are probably the most suited ones for it.
1428  *
1429  * The overall multipliers **of time of passage** (that is, the one returned by Pixel::getMultiplier("tp_multiplier") ) will then be the product of this multiplier that account for spatial heterogeneity and of an eventual exogenous
1430  * multiplier that accounts for different scenarios among the spatio-temporal dimensions.
1431  *
1432  * Given that (forest type index omitted):
1433  * - \f$V_{p}\f$ = volume of a given ft in each pixel (p)
1434  * - \f$\bar{g}\f$ and \f$\sigma_{g}\f$ = regional average and standard deviation of the growth rate
1435  * - \f$m_{p}\f$ = multiplier of time of passage
1436  *
1437  * This multiplier is computed as:
1438  * - \f$ v_{p} = max(V) - V_{p}~~ \f$ A diff from the max volume is computed in each pixel
1439  * - \f$ vr_{p} = v_{p} * \bar{g}/\bar{v}~~ \f$ The volume diff is rescaled to match the regional growth rate
1440  * - \f$ vrd_{p} = vr_{p} - \bar{vr}~~ \f$ Deviation of the rescaled volumes are computed
1441  * - \f$ vrdr_{p} = vrd_{p} * \sigma_{g}/\sigma_{vr}~~ \f$ The deviations are then rescaled to match the standard deviations of the regional growth rate
1442  * - \f$ m_{p} = (vrdr_{p} + \bar{vr}) / \bar{g}~~ \f$ The multiplier is computed from the ratio of the average rescaled volumes plus rescaled deviation over the average growth rate.
1443  *
1444  * And it has the following properties:
1445  * - \f$\bar{m} = 1\f$
1446  * - \f$\sigma_{m} = cv_{g}\f$
1447  * - \f$m_{p} = V_{p}*\alpha+\beta\f$
1448  * - \f$m_{\bar{V}} = 1\f$
1449  *
1450  * For spreadsheet "proof" see the file computation_of_growth_multipliers_from_know_avg_sd_and_proportional_to_share_of_area_in_each_pixel.ods
1451  */
1452 void
1454 
1455  if(!MTHREAD->MD->getBoolSetting("useSpatialVarPropToVol")){return;}
1456  for(uint r=0;r<regIds2.size();r++){
1457  int rId = regIds2[r];
1458  ModelRegion* reg = MD->getRegion(regIds2[r]);
1459  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[r]);
1460  for(uint f=0;f<fTypes.size();f++){
1461  string ft = fTypes[f];
1462  double agr = gfd("agr",regIds2[r],ft,"");
1463  double sStDev = gfd("sStDev",regIds2[r],ft,"");
1464  vector<double> vols;
1465  vector<double> diffVols;
1466  vector<double> diffVols_rescaled;
1467  double diffVols_rescaled_deviation;
1468  double diffVols_rescaled_deviation_rescaled;
1469  double final_value;
1470  double multiplier;
1471  vector<double> multipliers; // for tests
1472 
1473  double vol_max, rescale_ratio_avg, rescale_ratio_sd;
1474  double diffVols_avg, diffVols_rescaled_avg;
1475  double diffVols_rescaled_sd;
1476 
1477  for (uint p=0;p<rpx.size();p++){
1478  Pixel* px = rpx[p];
1479  vols.push_back(vSum(px->vol[f]));
1480  } // end for each pixel
1481  vol_max=getMax(vols);
1482 
1483  for(uint p=0;p<vols.size();p++){
1484  diffVols.push_back(vol_max-vols[p]);
1485  }
1486 
1487  diffVols_avg = getAvg(diffVols);
1488  rescale_ratio_avg = (diffVols_avg != 0.0) ? agr/diffVols_avg : 1.0;
1489  for(uint p=0;p<diffVols.size();p++){
1490  diffVols_rescaled.push_back(diffVols[p]*rescale_ratio_avg);
1491  }
1492  diffVols_rescaled_avg = getAvg(diffVols_rescaled);
1493  diffVols_rescaled_sd = getSd(diffVols_rescaled,false);
1494 
1495  rescale_ratio_sd = (diffVols_rescaled_sd != 0.0) ? sStDev/diffVols_rescaled_sd : 1.0;
1496  for(uint p=0;p<diffVols_rescaled.size();p++){
1497  diffVols_rescaled_deviation = diffVols_rescaled[p] - diffVols_rescaled_avg;
1498  diffVols_rescaled_deviation_rescaled = diffVols_rescaled_deviation * rescale_ratio_sd;
1499  final_value = diffVols_rescaled_avg + diffVols_rescaled_deviation_rescaled;
1500  multiplier = (agr != 0.0) ? min(1.6, max(0.4,final_value/agr)) : 1.0; //20151130: added bounds for extreme cases. Same bonds as in Gis::applySpatialStochasticValues()
1501  // multiplier = 1.0;
1502 
1503  Pixel* px = rpx[p];
1504  px->setSpModifier(multiplier,f);
1505  multipliers.push_back(multiplier);
1506  }
1507 
1508  #ifdef QT_DEBUG
1509  // Check relaxed as we introduced bounds that may change slighly the avg and sd...
1510  double avgMultipliers = getAvg(multipliers);
1511  double sdMultipliers = getSd(multipliers,false);
1512  if ( avgMultipliers < 0.9 || avgMultipliers > 1.1){
1513  msgOut(MSG_CRITICAL_ERROR, "The average of multipliers of ft "+ ft +" for the region " + i2s(rId) + " is not 1!");
1514  }
1515  if ( ( sdMultipliers - (sStDev/agr) ) < -0.5 || ( sdMultipliers - (sStDev/agr) ) > 0.5 ){
1516  double cv = sStDev/agr;
1517  msgOut(MSG_CRITICAL_ERROR, "The sd of multipliers of ft "+ ft +" for the region " + i2s(rId) + " is not equal to the spatial cv for the region!");
1518  }
1519  #endif
1520  } // end for each ft
1521  } // end for each region
1522 }
1523 
1524 
1525 
1526 void
1528 
1529  ///< call initialiseDeathBiomassStocks(), initialiseProductsStocks() and initialiseEmissionCounters()
1531 
1532  for(uint i=0;i<regIds2.size();i++){
1533  vector<double> deathBiomass;
1534  for(uint j=0;j<fTypes.size();j++){
1535  double deathBiomass_ft = gfd("vMort",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
1536  deathBiomass.push_back(deathBiomass_ft);
1537  }
1538  MTHREAD->CBAL->initialiseDeathBiomassStocks(deathBiomass, regIds2[i]);
1539  vector<double>qProducts;
1540  for(int p=0;p<priProducts.size();p++){
1541  // for the primary products we consider only the exports as the domestic consumption is entirelly transformed in secondary products
1542  double int_exports = gpd("sa",regIds2[i],priProducts[p],DATA_NOW);
1543  qProducts.push_back(int_exports);
1544  }
1545  for(int p=0;p<secProducts.size();p++){
1546  // for the tranformed product we skip those that are imported, hence derived from other forest systems
1547  double consumption = gpd("dl",regIds2[i],secProducts[p],DATA_NOW); // dl = sl + net regional imports
1548  qProducts.push_back(consumption);
1549  }
1550  MTHREAD->CBAL->initialiseProductsStocks(qProducts, regIds2[i]);
1551 
1552  }
1553 }
1554 
void
// NOTE(review): two lines of this function were lost in the listing extraction:
// the definition line carrying its name (it initialises the death-timber
// inventory over a 30-year backward window) and, inside the innermost loop, the
// statement that actually uses `key` (presumably zeroing/initialising
// MD->deathTimberInventory[key]). Recover both from the upstream source.
    int currentYear = MTHREAD->SCD->getYear();
    // Walk every (year, region, forest type, diameter class) combination of the
    // last 30 years, building the composite lookup key for each one.
    for(int y=currentYear;y>currentYear-30;y--){
        for(uint i=0;i<regIds2.size();i++){
            for(uint j=0;j<fTypes.size();j++){
                for (uint u=0;u<dClasses.size();u++){
                    iisskey key(y,regIds2[i],fTypes[j],dClasses[u]); // (int,int,string,string) composite key
                }
            }
        }
    }
}
1569 
1570 /**
1571  * @brief ModelCoreSpatial::initializePixelArea
1572  *
 * This function computes the initial area by ft and dc. It requires vHa, computed in computeCumulativeData(), which is why it is
 * separated from the other pixel-level initialisation routines.
1575  * As the sum of area computed using vHa may differ from the one memorised in forArea_* layer, all values are scaled to match
1576  * it before being memorised.
1577  * Also assign area = area_l
1578  */
1579 
1580 void
1582  msgOut(MSG_INFO, "Starting initializing pixel-level area");
1583  if(!MD->getBoolSetting("usePixelData")) return;
1584  for(uint i=0;i<regIds2.size();i++){
1585  ModelRegion* reg = MD->getRegion(regIds2[i]);
1586  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
1587  for (uint p=0;p<rpx.size();p++){
1588  Pixel* px = rpx[p];
1589  double pxid= px->getID();
1590  for(uint j=0;j<fTypes.size();j++){
1591  string ft = fTypes[j];
1592  vector <double> tempAreas;
1593  vector <double> areasByFt;
1594  double pxArea = px->getDoubleValue("forArea_"+ft,true)/10000.0; //ha
1595  for (uint u=0;u<dClasses.size();u++){
1596  if(u==0){
1597  double regionArea = reg->getValue("forArea_"+ft,OP_SUM)/10000.0; //ha
1598  double regRegVolumes = gfd("vReg",regIds2[i],ft,""); // regional regeneration volumes.. ugly name !!
1599  double newVReg = regionArea ? regRegVolumes*pxArea/regionArea : 0.0;
1600  double tp_u0 = px->tp.at(j).at(0); // time of passage to reach the first production diameter class
1601  double entryVolHa = gfd("entryVolHa",regIds2[i],ft,"");
1602  double tempArea = (newVReg*1000000.0/entryVolHa)*tp_u0;
1603  tempAreas.push_back(tempArea);
1604  } else {
1605  string dc = dClasses[u];
1606  double dcVol = px->vol_l.at(j).at(u)*1000000.0; // m^3
1607  double dcVHa = px->vHa.at(j).at(u); // m^3/ha
1608  #ifdef QT_DEBUG
1609  if(dcVol < 0.0 || dcVHa < 0.0){
1610  msgOut(MSG_CRITICAL_ERROR, "Negative volumes or density in initializePixelArea");
1611  }
1612  #endif
1613  double tempArea = dcVHa?dcVol/dcVHa:0;
1614  tempAreas.push_back(tempArea);
1615  }
1616 
1617  } // end dc
1618  double sumTempArea = vSum(tempAreas);
1619  // double sharedc0 = 5.0/90.0; // an arbitrary share of total area allocated to first diameter class
1620  //tempAreas.at(0) = sumTempArea * sharedc0;
1621  //sumTempArea = vSum(tempAreas);
1622  double normCoef = sumTempArea?pxArea/ sumTempArea:0;
1623  //cout << i << '\t' << pxid << '\t' << ft << '\t' << normCoef << endl;
1624  #ifdef QT_DEBUG
1625  if(normCoef < 0.0){
1626  msgOut(MSG_CRITICAL_ERROR, "Negative normCoef in initializePixelArea");
1627  }
1628  #endif
1629  for (uint u=0;u<dClasses.size();u++){
1630  areasByFt.push_back(tempAreas.at(u)*normCoef); //manca la costruzione originale del vettore
1631  }
1632  #ifdef QT_DEBUG
1633  if (pxArea != 0.0){
1634  double ratio = vSum(areasByFt)/ pxArea; // vSum(areasByFt) should be equal to pxArea
1635  if(ratio < 0.99999999999 || ratio > 1.00000000001) {
1636  msgOut(MSG_CRITICAL_ERROR, "pxArea is not equal to vSum(areasByFt) in initializePixelArea");
1637  }
1638  }
1639  #endif
1640  px->area_l.push_back(areasByFt);
1641  /// \todo here I have finally also area_ft_dc_px and I can implement the new one I am in 2006
1642  } // end ft
1643  px->area = px->area_l; //Assigning initial value of area to the area of the old year
1644  } // end px
1645  } // end region
1646  loadExogenousForestLayers("area");
1647  /// \todo: also update area_l
1648 }
1649 
void
// NOTE(review): two lines of this function were lost in the listing extraction:
// the definition line carrying its name (this is the routine that computes, per
// pixel / forest type / diameter class, the cumulative time of passage (cumTp),
// the volume per hectare (vHa) and the cumulative survival rate (cumAlive),
// together with their owner-expectation variants), and one statement between the
// "end of each region" brace and the trailing comments — presumably the one
// restoring the error level muted below with MD->setErrorLevel(MSG_NO_MSG).
// Recover both from the upstream source.

    msgOut(MSG_INFO, "Starting computing some cumulative values..");
    int thisYear = MTHREAD->SCD->getYear();

// double sumCumTP=0;
// double sumVHa = 0;
// double count = 0;
// double avg_sumCumTp;
// double avg_sumVHa;

    for(uint r2= 0; r2<regIds2.size();r2++){
        int regId = regIds2[r2];
        regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();

        for (uint p=0;p<regPx.size();p++){
            Pixel* px = regPx[p];
            // The cumulative vectors are fully rebuilt below for each pixel
            px->cumTp.clear();
            px->cumTp_exp.clear();
            px->vHa_exp.clear();
            px->vHa.clear();
            px->cumAlive.clear();
            px->cumAlive_exp.clear();
            double expType = px->expType; // owner expectation type: -1, or a weight in [0,1] (see the long comment below)

            for(uint j=0;j<fTypes.size();j++){
                string ft = fTypes[j];

                // Multipliers both at the current year ("now") and at the first simulation
                // year ("t0", used when expType == -1)
                double tp_multiplier_now = px->getMultiplier("tp_multiplier",ft,DATA_NOW);
                double tp_multiplier_t0 = px->getMultiplier("tp_multiplier",ft,firstYear);
                double mortCoef_multiplier_now = px->getMultiplier("mortCoef_multiplier",ft,DATA_NOW);
                double mortCoef_multiplier_t0 = px->getMultiplier("mortCoef_multiplier",ft,firstYear);
                double betaCoef_multiplier_now = px->getMultiplier("betaCoef_multiplier",ft,DATA_NOW);
                double betaCoef_multiplier_t0 = px->getMultiplier("betaCoef_multiplier",ft,firstYear);
                double pathMort_now, pathMort_t0;

                // calculating the cumulative time of passage and the (cumulativelly generated) vHa for each diameter class (depending on forest owners diam growth expectations)
                //loop(u$(ord(u)=1),
                //  cumTp(u,i,lambda,essence) = tp_u1(i,essence,lambda);
                //);
                //loop(u$(ord(u)>1),
                //  cumTp(u,i,lambda,essence) = cumTp(u-1,i,lambda,essence)+tp(u-1,i,lambda,essence);
                //);
                ////ceil(x) DNLP returns the smallest integer number greater than or equal to x
                //loop( (u,i,lambda,essence),
                //  cumTp(u,i,lambda,essence) = ceil(cumTp(u,i,lambda,essence));
                //);
                vector <double> cumTp_temp; // cumulative time of passage to REACH a diameter class (tp is to LEAVE to the next one)
                vector <double> vHa_temp; // volume at hectar by each diameter class [m^3/ha]
                vector <double> cumAlive_temp; // cumulated alive rate to reach a given diameter class
                vector <double> cumTp_exp_temp; // expected version of cumTp_temp
                vector <double> vHa_exp_temp; // expected version of vHa_temp
                vector <double> cumAlive_exp_temp; // "expected" version of cumMort

                MD->setErrorLevel(MSG_NO_MSG); // as otherwise on 2007 otherwise sfd() will complain that is filling multiple years (2006 and 2007)
                for (uint u=0; u<dClasses.size(); u++){
                    string dc = dClasses[u];
                    double cumTp_u, cumTp_u_exp, cumTp_u_noExp, cumTp_u_fullExp;
                    double tp, tp_exp, tp_noExp, tp_fullExp;
                    double vHa_u, vHa_u_exp, vHa_u_noExp, vHa_u_fullExp, beta, beta_exp, beta_noExp, beta_fullExp, mort, mort_exp, mort_noExp, mort_fullExp;
                    double cumAlive_u, cumAlive_exp_u;
                    pathMort_now = px->getPathMortality(ft,dc,DATA_NOW);
                    pathMort_t0 = px->getPathMortality(ft,dc,firstYear);
                    double additionalMortality_now = px->getSTData("additionalMortality", ft, DATA_NOW, dc, 0.0);
                    double additionalMortality_t0 = px->getSTData("additionalMortality", ft, firstYear, dc, 0.0);

                    // only cumTp is depending for the expectations, as it is what it is used by owner to calculate return of investments.
                    // the tp, beta and mort coefficients instead are the "real" ones as predicted by scientist for that specific time

                    if(u==0) {
                        // first diameter class.. expected and real values are the same (0)
                        cumTp_u = 0.;
                        vHa_u = 0.;
                        cumAlive_u = 1.;
                        cumTp_temp.push_back(cumTp_u);
                        vHa_temp.push_back(vHa_u);
                        cumTp_exp_temp.push_back(cumTp_u);
                        vHa_exp_temp.push_back(vHa_u);
                        cumAlive_temp.push_back(cumAlive_u);
                        cumAlive_exp_temp.push_back(cumAlive_u);
                    } else {
                        // other diameter classes.. first dealing with real values and then with expected ones..
                        // real values..
                        tp = gfd("tp",regId,ft,dClasses[u-1],thisYear)*tp_multiplier_now;
                        cumTp_u = cumTp_temp[u-1] + tp;
                        if (u==1){
                            /**
                            Note on the effect of mortality modifiers on the entryVolHa.
                            Unfortunatly for how it is defined the mortality multiplier (the ratio with the new mortality rate over the old one) we can't
                            compute a entryVolHa based on it. It is NOT infact just like: vHa_adjusted = vHa_orig / mort_multiplier.
                            The effect of mortality on the vHa of the first diameter class is unknow, and so we can't compute the effect of a relative
                            increase.
                            */
                            vHa_u = gfd("entryVolHa",regId,ft,"",thisYear);
                            mort = 0.; // not info about mortality first diameter class ("00")
                        } else {
                            mort = 1-pow(1-min(gfd("mortCoef",regId,ft,dClasses[u-1],thisYear)*mortCoef_multiplier_now+pathMort_now+additionalMortality_now,1.0),tp); // mortality of the previous diameter class
                            beta = gfd("betaCoef",regId,ft,dc, thisYear)*betaCoef_multiplier_now;
                            vHa_u = vHa_temp[u-1]*beta*(1-mort);
                        }
                        cumAlive_u = max(0.,cumAlive_temp[u-1]*(1-mort));
                        cumAlive_temp.push_back(cumAlive_u);
                        cumTp_temp.push_back(cumTp_u);
                        vHa_temp.push_back(vHa_u);
                        // expected values..
                        /**
                        param expType Specify how the forest owners (those that make the investments) behave will be the time of passage in the future in order to calculate the cumulative time of passage in turn used to discount future revenues.
                        Will forest owners behave adaptively believing the time of passage between diameter classes will be like the observed one at time they make decision (0) or they will have full expectations believing forecasts (1) or something in the middle ?
                        For compatibility with the GAMS code, a -1 value means using initial simulation tp values (fixed cumTp)."
                        */
                        if (expType == -1){
                            // fixed expectations: always use the first-simulation-year coefficients
                            tp_exp = gfd("tp",regId,ft,dClasses[u-1],firstYear)*tp_multiplier_t0;
                            //tp = px->tp.at(u); no. not possible, tp stored at pixel level is the current year one
                            cumTp_u_exp = cumTp_exp_temp[u-1]+tp_exp;
                            cumTp_exp_temp.push_back(cumTp_u_exp);
                            if(u==1) {
                                vHa_u_exp = gfd("entryVolHa",regId,ft,"",firstYear);
                                mort_exp = 0.; // not info about mortality first diameter class ("00")
                            } else {
                                mort_exp = 1-pow(1-min(gfd("mortCoef",regId,ft,dClasses[u-1], firstYear)*mortCoef_multiplier_t0+pathMort_t0+additionalMortality_t0,1.0),tp_exp); // mortality rate of previous diameter class
                                beta_exp = gfd("betaCoef",regId,ft,dc, firstYear)*betaCoef_multiplier_t0;
                                vHa_u_exp = vHa_exp_temp[u-1]*beta_exp*(1-mort_exp);
                            }
                        } else {
                            // expType in [0,1]: weighted mix of adaptive ("noExp") and forecast-believing ("fullExp") values
                            double tp_multiplier_dynamic = px->getMultiplier("tp_multiplier",ft,thisYear+ceil(cumTp_exp_temp[u-1]));
                            tp_noExp = gfd("tp",regId,ft,dClasses[u-1])*tp_multiplier_now;
                            cumTp_u_noExp = cumTp_exp_temp[u-1]+tp_noExp;
                            tp_fullExp = gfd("tp",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]))*tp_multiplier_dynamic ; // time of passage that there should be to reach this diameter class in the year where the previous diameter class will be reached
                            cumTp_u_fullExp = cumTp_exp_temp[u-1]+tp_fullExp ; // it adds to the time of passage to reach the previous diameter class the time of passage that there should be to reach this diameter class in the year where the previous diameter class will be reached
                            cumTp_u_exp = cumTp_u_fullExp*expType+cumTp_u_noExp*(1-expType); // 20121108: it's math the same as cumTp_exp_temp[u-1] + tp
                            cumTp_exp_temp.push_back(cumTp_u_exp);
                            if(u==1) {
                                vHa_u_noExp = gfd("entryVolHa",regId,ft,"",DATA_NOW);
                                vHa_u_fullExp = gfd("entryVolHa",regId,ft,"",thisYear+ceil(cumTp_u));
                                vHa_u_exp = vHa_u_fullExp*expType+vHa_u_noExp*(1-expType);
                                mort_exp = 0.; // not info about mortality first diameter class ("00")
                            } else {
                                mort_noExp = 1-pow(1-min(1.0,gfd("mortCoef",regId,ft,dClasses[u-1], DATA_NOW)*mortCoef_multiplier_now+pathMort_now+additionalMortality_now), tp_noExp); // mortCoef is a yearly value. Mort coeff between class is 1-(1-mortCoeff)^tp
                                double mortCoef_multiplier_dynamic = px->getMultiplier("mortCoef_multiplier",ft,thisYear+ceil(cumTp_exp_temp[u-1]));
                                double pathMort_dynamic = px->getPathMortality(ft,dc,thisYear+ceil(cumTp_exp_temp[u-1]));
                                double additionalMortality_dynamic = px->getSTData("additionalMortality", ft, thisYear+ceil(cumTp_exp_temp[u-1]), dc, 0.0);
                                mort_fullExp = 1-pow(1-min(1.0,gfd("mortCoef",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]))*mortCoef_multiplier_dynamic+pathMort_dynamic+additionalMortality_dynamic),tp_fullExp); // mortality of the previous diameter class
                                //double debug1 = gfd("mortCoef",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]));
                                //double debug2 = debug1*mortCoef_multiplier_dynamic+pathMort_dynamic;
                                //double debug3 = min(1.0,debug2);
                                //double debug4 = 1.0-debug3;
                                //double debug5 = pow(debug4,tp_fullExp);
                                //double debug6 = 1.0-debug5;


                                beta_noExp = gfd("betaCoef",regId,ft,dc, DATA_NOW)*betaCoef_multiplier_now;
                                double betaCoef_multiplier_dynamic = px->getMultiplier("betaCoef_multiplier",ft,thisYear+ceil(cumTp_u));
                                beta_fullExp = gfd("betaCoef",regId,ft,dc, thisYear+ceil(cumTp_u))*betaCoef_multiplier_dynamic;
                                mort_exp = mort_fullExp*expType+mort_noExp*(1-expType);
                                beta_exp = beta_fullExp*expType+beta_noExp*(1-expType);
                                vHa_u_exp = vHa_exp_temp[u-1]*beta_exp*(1-mort_exp); // BUG !!! mort is yearly value, not between diameter class. SOLVED 20121108
                            }
                        }
                        vHa_exp_temp.push_back(vHa_u_exp);
                        cumAlive_exp_u = max(0.,cumAlive_exp_temp[u-1]*(1-mort_exp));
                        cumAlive_exp_temp.push_back(cumAlive_exp_u);

                        //cout << "********" << endl;
                        //cout << "dc;mort;cumAlive;cumAlive_exp "<< endl ;
                        //cout << dClasses[u] << ";"<< mort << ";" << cumAlive_u << ";" << cumAlive_exp_u << endl;

                    }
                    // debug stuff on vHa
                    //double vHa_new = gfd("vHa",regId,ft,dc,DATA_NOW);
                    //double hv2fa_old = gfd("hv2fa",regId,ft,dc,DATA_NOW);
                    //cout << "Reg|Ft|dc|vHa (new)|1/hv2fa (old): " << regId << " | " << ft;
                    //cout << " | " << dc << " | " << vHa_new << " | " << 1/hv2fa_old << endl;

                } // end of each diam
                //double pixID = px->getID();
                //cout << thisYear << ";"<< regIds2[r2] << ";" << pixID << ";" << ft << ";" << cumTp_exp_temp[3] << ";" << vHa_exp_temp[3] << endl;
                px->cumTp.push_back(cumTp_temp);
                px->vHa.push_back(vHa_temp);
                px->cumAlive.push_back(cumAlive_temp);
                px->cumTp_exp.push_back(cumTp_exp_temp);
                px->vHa_exp.push_back(vHa_exp_temp);
                px->cumAlive_exp.push_back(cumAlive_exp_temp);

                //sumCumTP += cumTp_exp_temp[3];
                //sumVHa += vHa_exp_temp[3];
                //count ++;


            } // end of each ft
            //double debug = 0.0;
        } // end of each pixel
    } // end of each region
    //avg_sumCumTp = sumCumTP/ count;
    //avg_sumVHa = sumVHa / count;
    //cout << "Avg sumCumTp_35 and sumVha_35: " << avg_sumCumTp << " and " << avg_sumVHa << " (" << count << ")" << endl;
    //exit(0);
}
1850 
1851 void
1853  msgOut(MSG_INFO, "Starting resetting pixel level values");
1854  for(uint r2= 0; r2<regIds2.size();r2++){
1855  int regId = regIds2[r2];
1856  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1857  for (uint p=0;p<regPx.size();p++){
1858  Pixel* px = regPx[p];
1859  px->swap(VAR_VOL); // vol_l = vol
1860  px->swap(VAR_AREA); // area_l = area
1861  // 20121108 BUG! Solved, used empty (just return true if the vector is empty) instead of clear (it actually clears the vector)
1862  px->vol.clear(); // by ft,dc
1863  px->area = px->area_l; // ATTENCTION, DIFFERENT FROM THE OTHERS. Here it is not cleared, it is assigned the previous year as default
1864  /*px->area.clear(); // by ft,dc*/
1865  px->hArea.clear(); // by ft, dc
1866  px->deltaArea.clear();
1867  //px->regArea.clear(); // by year, ft NO, this one is a map, it doesn't need to be changed
1868  px->hVol.clear(); // by ft, dc
1869  px->hVol_byPrd.clear(); // by ft, dc, pp
1870  //px->in.clear(); // by pp
1871  //px->hr.clear(); // by pp
1872  px->vReg.clear(); // by ft
1873  px->expectedReturns.clear(); // by ft
1874  px->beta.clear();
1875  px->mort.clear();
1876  px->tp.clear();
1877  px->cumTp.clear();
1878  px->vHa.clear();
1879  px->cumTp_exp.clear();
1880  px->vHa_exp.clear();
1881  px->cumAlive.clear();
1882  px->cumAlive_exp.clear();
1883  px->vMort.clear();
1884  //std::fill(rpx[j]->vMort.begin(), rpx[j]->vMort.end(), 0.0);
1885 
1886  }
1887  }
1888 }
1889 
// NOTE(review): the function signature line is missing from this extract; from the
// log message below this is presumably ModelCoreSpatial::cachePixelExogenousData().
// Rebuilds the per-pixel caches of exogenous parameters (tp, beta, mort, avalCoef),
// indexed by forest type and then diameter class, applying the pixel-level
// multipliers for the current year.
1890 void
1892 
1893  msgOut(MSG_INFO, "Starting cashing on pixel spatial-level exogenous data");
1894  bool applyAvalCoef = MTHREAD->MD->getBoolSetting("applyAvalCoef");
1895 
1896  for(uint r2= 0; r2<regIds2.size();r2++){
1897  int regId = regIds2[r2];
1898  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1899  for (uint p=0;p<regPx.size();p++){
1900  Pixel* px = regPx[p];
1901  px->tp.clear();
1902  px->beta.clear();
1903  px->mort.clear();
1904  px->avalCoef.clear();
1905 
1906  for(uint j=0;j<fTypes.size();j++){
1907  string ft = fTypes[j];
      // Per-diameter-class caches for this forest type (pushed into px at the end).
1908  vector <double> tp_byu;
1909  vector <double> beta_byu;
1910  vector <double> mort_byu;
1911 
1912  double tp_multiplier_now = px->getMultiplier("tp_multiplier",ft,DATA_NOW);
1913  double mortCoef_multiplier_now = px->getMultiplier("mortCoef_multiplier",ft,DATA_NOW);
1914 
1915  double betaCoef_multiplier_now = px->getMultiplier("betaCoef_multiplier",ft,DATA_NOW);
      // avalCoef defaults to 1.0 (no availability restriction) when the option is off.
1916  double avalCoef_ft = applyAvalCoef?px->getSTData("avalCoef",ft,DATA_NOW):1.0;
1917 
1918  if(avalCoef_ft == MTHREAD->MD->getDoubleSetting("noValue")){
1919  msgOut(MSG_CRITICAL_ERROR,"applyAvalCoef has benn asked for, but I can't find the avalCoef in the data!");
1920  }
1921 
1922  for (uint u=0; u<dClasses.size(); u++){
1923  string dc = dClasses[u];
1924  double pathMortality = px->getPathMortality(ft,dc,DATA_NOW);
1925  double additionalMortality = px->getSTData("additionalMortality", ft, DATA_NOW, dc, 0.0);
1926  double tp, beta_real, mort_real;
1927  if (u==0){
1928  // tp of first diameter class not making it change across the time dimension, otherwise problems in getting the rigth past
1929  // regenerations. BUT good, px->tp.at(0) is used only to pick up the right regeneration, so the remaining of the model
1930  // uses the getMultiplier version and cumTp consider the dynamic effects also in the first dc.
1931  tp = gfd("tp",regId,ft,dClasses[u],firstYear)*px->getMultiplier("tp_multiplier",ft,firstYear); // tp is defined also in the first diameter class, as it is the years to reach the NEXT diameter class
1932  } else {
1933  tp = gfd("tp",regId,ft,dClasses[u],DATA_NOW)*tp_multiplier_now; // tp is defined also in the first diameter class, as it is the years to reach the NEXT diameter class
1934  }
      // beta and mortality are zero for the first diameter class (u==0).
1935  beta_real = u?gfd("betaCoef",regId,ft,dClasses[u],DATA_NOW)*betaCoef_multiplier_now:0;
1936  mort_real = min(u?gfd("mortCoef",regId,ft,dClasses[u],DATA_NOW)*mortCoef_multiplier_now+pathMortality+additionalMortality :0,1.0); //Antonello, bug fixed 20160203: In any case, natural plus pathogen mortality can not be larger than 1!
1937  tp_byu.push_back(tp);
1938  beta_byu.push_back(beta_real);
1939  mort_byu.push_back(mort_real);
1940  } // end of each dc
1941  px->tp.push_back(tp_byu);
1942  px->beta.push_back(beta_byu);
1943  px->mort.push_back(mort_byu);
1944  px->avalCoef.push_back(avalCoef_ft);
1945  } // end of each ft
1946  } // end of each pixel
1947  } // end of each region
1948 }
1949 
// NOTE(review): the function signature line is missing from this extract; from the
// log message below this is presumably ModelCoreSpatial::computeInventary().
// Computes, for each region, the inventory available this year by primary product:
// "in" from alive volumes (vol_l scaled by avalCoef), "in_deathTimber" from the
// death-timber inventory, plus one combined bound per subset of primary products
// (2^n combinations) used by the optimisation.
1950 void
1952  msgOut(MSG_INFO, "Starting computing inventary available for this year..");
     // NOTE(review): pow(2,n) returns double and is stored in an int; an integer
     // shift (1 << n) would avoid the float round-trip — confirm before changing.
     // Also nbounds (int) is compared against uint i in the loop below.
1953  int nbounds = pow(2,priProducts.size());
1954  vector<vector<int>> concernedPriProductsTotal = MTHREAD->MD->createCombinationsVector(priProducts.size());
1955  int currentYear = MTHREAD->SCD->getYear();
1956 
1957  for(uint i=0;i<regIds2.size();i++){
1958  int r2 = regIds2[i];
1959  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
1960  //Gis* GIS = MTHREAD->GIS;
1961  regPx = REG->getMyPixels();
1962  vector <double> in_reg(priProducts.size(),0.); // should have ceated a vector of size priProducts.size(), all filled with zeros
1963  vector <double> in_deathTimber_reg(priProducts.size(),0.); // should have ceated a vector of size priProducts.size(), all filled with zeros
1964  for (uint p=0;p<regPx.size();p++){
1965  Pixel* px = regPx[p];
1966  //int debugPx = px->getID();
1967  //int debug2 = debugPx;
1968  //px->in.clear();
1969  for(uint pp=0;pp<priProducts.size();pp++){
1970  double in = 0;
     // Sum last-year volumes over all (ft,dc), weighted by the product
     // appartenence coefficient and the availability coefficient of the ft.
1971  for(uint ft=0;ft<fTypes.size();ft++){
1972  for(uint dc=0;dc<dClasses.size();dc++){
1973  in += app(priProducts[pp],fTypes[ft],dClasses[dc])*px->vol_l.at(ft).at(dc)*px->avalCoef.at(ft);
1974  }
1975  }
1976  //px->in.push_back(in);
1977  in_reg.at(pp) += in;
1978  } // end of each priProduct
1979  } // end each pixel
1980 
1981 
1982  for(uint pp=0;pp<priProducts.size();pp++){
1983  vector<string> priProducts_vector;
1984  priProducts_vector.push_back(priProducts[pp]);
1985 
1986  double in_deathMortality = MD->getAvailableDeathTimber(priProducts_vector,r2,currentYear-1);
1987  in_deathTimber_reg.at(pp) += in_deathMortality;
1988 
1989  // Even if I fixed all the lower bounds to zero in Opt::get_bounds_info still the model
1990  // doesn't solve with no-forest in a region.
1991  // Even with 0.0001 doesn't solve !!
1992  // With 0.001 some scenarios doesn't solve in 2093
1993  // With 0.003 vRegFixed doesn't solve in 2096
1994  // Tried with 0.2 but no changes, so put it back on 0.003
1995  //spd(max(0.001,in_reg.at(pp)),"in",r2,priProducts[pp],DATA_NOW,true);
1996  spd(in_reg.at(pp),"in",r2,priProducts[pp],DATA_NOW,true);
1997  spd(in_deathTimber_reg.at(pp),"in_deathTimber",r2,priProducts[pp],DATA_NOW,true);
1998  #ifdef QT_DEBUG
1999  if (in_reg.at(pp) < -0.0){
2000  msgOut(MSG_CRITICAL_ERROR,"Negative inventory");
2001  }
2002  #endif
2003  }
2004 
2005  // ##### Now creating a set of bonds for the optimisation that account of the fact that the same ft,dc can be used for multiple products:
2006 
2007  // 20160928: Solved a big bug: for each combination instead of taking the UNION of the various priProduct inventory sets I was taking the sum
2008  // Now both the alive and the death timber are made from the union
2009  // 20150116: As the same (ft,dc) can be used in more than one product knowing -and bounding the supply in the optimisation- each single
2010  // in(pp) is NOT enought.
2011  // We need to bound the supply for each possible combination, that is for 2^(number of prim.pr)
2012  // Here we compute the detailed inventory. TO.DO: Create the pounds in Opt. done
2013  // 20160209: Rewritten and corrected a bug that was not giving enought inv to multiproduct combinations
2014  for (uint i=0; i<nbounds; i++){
2015  vector<int> concernedPriProducts = concernedPriProductsTotal[i];
2016  vector<string> concernedPriProducts_ids = positionsToContent(priProducts,concernedPriProducts);
2017  //double debug = 0.0;
2018  //for(uint z=0;z<concernedPriProducts.size();z++){
2019  // debug += gpd("in",r2,priProducts[concernedPriProducts[z]]); // to.do: this will need to be rewritten checked!
2020  //}
2021  double bound_alive = MD->getAvailableAliveTimber(concernedPriProducts_ids,r2); // From px->vol_l, as in "in"
2022  double bound_deathTimber = MD->getAvailableDeathTimber(concernedPriProducts_ids,r2,currentYear-1); // From deathTimberInventory map
2023  double bound_total = bound_alive + bound_deathTimber;
2024 
2025  REG->inResByAnyCombination[i] = bound_total;
2026  //REG->inResByAnyCombination_deathTimber[i] = bound_deathTimber;
2027  } // end for each bond
2028  } // end each region
2029 }
2030 
// NOTE(review): the function signature line is missing from this extract; from the
// error strings below this is ModelCoreSpatial::updateMapAreas().
// Writes the per-pixel forest area back into the "forArea_<ft>" GIS layers (m^2).
// Two strategies: the new method simply dumps vSum(px->area) per ft; the old
// ("oldVol2AreaMethod") one updates the previous layer value with harvested and
// regenerated areas.
2030 void
2032  msgOut(MSG_INFO, "Updating map areas..");
2033 
2034  if (!oldVol2AreaMethod){
2035  if(!MD->getBoolSetting("usePixelData")) return;
2036  for(uint i=0;i<regIds2.size();i++){
     // NOTE(review): 'reg' and 'pxid' below are computed but never used in this
     // branch — candidates for removal.
2037  ModelRegion* reg = MD->getRegion(regIds2[i]);
2038  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
2039  for (uint p=0;p<rpx.size();p++){
2040  Pixel* px = rpx[p];
2041  double pxid= px->getID();
2042  for(uint j=0;j<fTypes.size();j++){
2043  string ft = fTypes[j];
2044  double forArea = vSum(px->area.at(j));
2045  #ifdef QT_DEBUG
2046  if(forArea < 0.0 ){
2047  msgOut(MSG_CRITICAL_ERROR, "Negative forArea in updateMapAreas");
2048  }
2049  #endif
     // model areas are in ha, the layer stores m^2 -> *10000
2050  px->changeValue("forArea_"+ft, forArea*10000);
2051  } // end ft
2052  } // end px
2053  } // end region
2054  } else {
2055  int currentYear = MTHREAD->SCD->getYear();
     // NOTE(review): forestArea / forestAreaPair are declared but never used here.
2056  map<int,double> forestArea; // foresta area by each region
2057  pair<int,double > forestAreaPair;
2058  vector<int> l2Regions = MTHREAD->MD->getRegionIds(2, true);
2059  vector <string> fTypes = MTHREAD->MD->getForTypeIds();
2060  int nFTypes = fTypes.size();
2061  int nL2Regions = l2Regions.size();
2062  for(int i=0;i<nL2Regions;i++){
2063  int regId = l2Regions[i];
2064  vector<Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regId);
2065  for(int j=0;j<nFTypes;j++){
2066  string ft = fTypes[j];
2067  //double regForArea = reg->getValue("forArea_"+ft);
2068  //double harvestedArea = gfd("harvestedArea",regId,ft,DIAM_ALL);
2069  //double regArea = gfd("regArea",regId,ft,DIAM_ALL);
2070  //cout << "Regid/ft/area/harvested/regeneration: " <<regId<<";"<<ft<<";"<<regForArea<<";"<<harvestedArea<<";" <<regArea<<endl;
2071  //double newAreaNet = regArea-harvestedArea;
2072  //double newAreaRatio = newAreaNet / regForArea;
2073  for(uint z=0;z<rpx.size();z++){
2074  Pixel* px = rpx[z];
2075  double oldValue = px->getDoubleValue("forArea_"+ft,true)/10000;
2076  double hArea = vSum(px->hArea.at(j)); //bug 20140205 areas in the model are in ha, in the layer in m^2
2077  double regArea = findMap(px->regArea,currentYear).at(j); //bug 20140205 areas in the model are in ha, in the layer in m^2
2078  //double newValue = oldValue*(1. + newAreaRatio);
     // new layer value = previous value - harvested + regenerated (all in ha here)
2079  double newValue = oldValue-hArea+regArea;
2080  double areaNetOfRegeneration = oldValue-hArea;
2081  #ifdef QT_DEBUG
2082  if (areaNetOfRegeneration<0.0){
2083  msgOut(MSG_CRITICAL_ERROR,"areaNetOfRegeneration negative in updateMapAreas");
2084  }
2085  if (newValue<0.0){
2086  msgOut(MSG_CRITICAL_ERROR,"for area negative in updateMapAreas");
2087  }
2088  #endif
2089  rpx[z]->changeValue("forArea_"+ft, newValue*10000);
2090  }
2091  }
2092  }
2093  }
2094 }
2096 
// NOTE(review): the function signature line is missing from this extract; given the
// layers it refreshes, this is presumably ModelCoreSpatial::updateOtherMapData().
// Copies per-pixel total volume and expected returns for each forest type into the
// "vol_<ft>" / "expectedReturns_<ft>" GIS layers (when those layers exist), then
// refreshes the corresponding GUI images.
2096 void
2098 
2100 vector<int> l2Regions = MTHREAD->MD->getRegionIds(2, true);
2101 vector <string> fTypes = MTHREAD->MD->getForTypeIds();
2102 int nFTypes = fTypes.size();
2103 int nL2Regions = l2Regions.size();
2104 for(int i=0;i<nL2Regions;i++){
2105  int regId = l2Regions[i];
2106  vector<Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regId);
2107  for(int j=0;j<nFTypes;j++){
2108  string ft = fTypes[j];
2109  for(uint z=0;z<rpx.size();z++){
2110  Pixel* px = rpx[z];
2111  double vol = vSum(px->vol.at(j));
2112  double expectedReturns = px->expectedReturns.at(j);
     // Layers are optional: write only if the layer was configured.
2113  if(MTHREAD->GIS->layerExist("vol_"+ft)){
2114  rpx[z]->changeValue("vol_"+ft, vol);
2115  }
2116  if(MTHREAD->GIS->layerExist("expectedReturns_"+ft)){
2117  rpx[z]->changeValue("expectedReturns_"+ft, expectedReturns);
2118  }
2119  }
2120  }
2121 }
2122 
2123 // update GUI image..
2124 for(int j=0;j<nFTypes;j++){
2125  string ft = fTypes[j];
2126  MTHREAD->GIS->updateImage("vol_"+ft);
2127  MTHREAD->GIS->updateImage("expectedReturns_"+ft);
2128 }
2129 
2130 
2131 }
2132 
2133 
// NOTE(review): the function signature line is missing from this extract; from the
// log message below this is presumably ModelCoreSpatial::sumRegionalForData().
// Aggregates pixel-level results to the regional level: volumes, harvested
// volumes/areas, mortality, productivity and harvesting rate by (ft,dc);
// regeneration, regeneration area and forest area by ft; then area-weighted
// expected returns (overall, by parent forest-type group and by ft); and finally,
// if the pathogen module is active, the share of invaded forest area per region.
2134 void
2136 
2137  msgOut(MSG_INFO, "Summing data pixels->region..");
2138  //vector <string> outForVariables = MTHREAD->MD->getStringVectorSetting("outForVariables");
2139  int currentYear = MTHREAD->SCD->getYear();
2140 
2141  //map<vector<string>,double> hVol_byPrd; // by regid, ft, dc, pp
2142  map<tr1::array<string, 4>,double> hVol_byPrd; // by regid, ft, dc, pp
2143 
2144  // OLD CODE TO
2145  for(uint r2= 0; r2<regIds2.size();r2++){
2146  int regId = regIds2[r2];
2147  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels(); // by regId, ft, dc, pp
2148 
2149  // vector < vector <vector <double> > >hVol_byPrd; // by ft, dc, pp
2150 
2151  for(uint j=0;j<fTypes.size();j++){
2152  string ft = fTypes[j];
2153 
2154  double regArea = 0.;
2155  double sumAreaByFt = 0.;
2156  double pxForAreaByFt = 0.;
2157  double vReg = 0.;
2158  double sumSumHProductivity = 0.;
2159  double sumHArea = 0.;
2160 
2161  for (uint u=0; u<dClasses.size(); u++){
2162  string dc = dClasses[u];
2163  double vol =0.;
2164  double hV = 0.;
2165  double hArea = 0.;
2166  double vMort = 0.;
2167  double sumHProductivity = 0.; // productivity * area
     // Sum this (ft,dc) cell over all pixels of the region.
2168  for (uint pi=0;pi<regPx.size();pi++){
2169  Pixel* px = regPx[pi];
2170  vol += px->vol.at(j).at(u);
2171  hV += px->hVol.at(j).at(u);
2172  hArea += px->hArea.at(j).at(u);
2173  vMort += px->vMort.at(j).at(u);
2174  sumHProductivity += (px->hProductivity.at(j).at(u) * px->hArea.at(j).at(u));
2175  if(MD->getBoolSetting("outDetailedHv",DATA_NOW)){
2176  for(uint pp=0;pp<priProducts.size();pp++){
2177  string pr = priProducts[pp];
2178  // vector <string> hVKey = {i2s(regId), ft, dc, pr};
2179  tr1::array<string, 4> hVKey = {i2s(regId), ft, dc, pr};
2180  //double debug = px->hVol_byPrd.at(j).at(u).at(pp);
2181  //cout << px->getID() << '\t' << j << '\t' << u << '\t' << pp << '\t' << debug << endl;
2182  incrOrAddMapValue(hVol_byPrd, hVKey, px->hVol_byPrd.at(j).at(u).at(pp));
2183  }
2184  }
2185  }
     // Regional values are stored only for u>0 (the first dc is excluded).
2186  if(u){
2187  sfd(vol,"vol",regId,ft,dc,DATA_NOW);
2188  sfd(hV,"hV",regId,ft,dc,DATA_NOW,true);
2189  double hProductivityByDc = hArea ? sumHProductivity/hArea : 0.;
2190  sumSumHProductivity += (hProductivityByDc*hArea);
2191  sumHArea += hArea;
2192  sfd(hProductivityByDc,"hProductivityByDc",regId,ft,dc,DATA_NOW,true);
2193  sfd(hArea,"harvestedArea",regId,ft,dc,DATA_NOW, true);
2194  sfd(vMort,"vMort",regId,ft,dc,DATA_NOW,true);
     // Harvesting rate = harvested volume over last year's standing volume.
2195  double vol_1 = gfd("vol",regId,ft,dc,currentYear-1);
2196  if(vol_1){
2197  sfd(hV/vol_1,"hr",regId,ft,dc,DATA_NOW, true);
2198  } else {
2199  sfd(0.,"hr",regId,ft,dc,DATA_NOW, true);
2200  }
2201 
2202  }
2203  } // end foreach dc
2204  double hProductivity = sumHArea? sumSumHProductivity / sumHArea : 0.;
2205  sfd(hProductivity,"hProductivity",regId,ft,"",DATA_NOW,true);
2206 
2207  for (uint p=0;p<regPx.size();p++){
2208  Pixel* px = regPx[p];
2209  vReg += px->vReg.at(j);
2210  regArea += findMap(px->regArea,currentYear).at(j);
     // layer stores m^2, model works in ha -> /10000
2211  pxForAreaByFt = (px->getDoubleValue("forArea_"+ft,true)/10000);
2212 
2213  sumAreaByFt += pxForAreaByFt;
2214  //double debug1 = sumAreaByFt;
2215  if(! (sumAreaByFt >= 0.0) ){
2216  msgOut(MSG_CRITICAL_ERROR,"sumAreaByFt is not non negative.");
2217  }
2218  }
2219  sfd(vReg,"vReg",regId,ft,"",DATA_NOW, true);
2220  sfd(regArea,"regArea",regId,ft,"",DATA_NOW, true);
2221  sfd(sumAreaByFt,"forArea",regId,ft,"",DATA_NOW, true);
2222  } // end of for each ft
2223 
     // Debug-only cross-check: area from the forArea_<ft> layers must agree with
     // the pixel's own area vector within 0.0001 ha.
2224  for (uint p=0;p<regPx.size();p++){
2225  Pixel* px = regPx[p];
2226  double totPxForArea = vSum(px->area);
2227 
2228 #ifdef QT_DEBUG
2229  double totPxForArea_debug = 0.0;
2230  for(uint j=0;j<fTypes.size();j++){
2231  string ft = fTypes[j];
2232  totPxForArea_debug += (px->getDoubleValue("forArea_"+ft,true)/10000);
2233  }
2234 
2235  if ( (totPxForArea - totPxForArea_debug) > 0.0001 || (totPxForArea - totPxForArea_debug) < -0.0001 ){
2236  cout << "*** ERROR: area discrepance in pixel " << px->getID() << " of " << (totPxForArea - totPxForArea_debug) << " ha!" << endl;
2237  msgOut(MSG_CRITICAL_ERROR,"Total forest area in pixel do not coincide if token from layer forArea or (pixel) vector area!");
2238  }
2239 #endif
2240  } // end of each pixel
2241 
2242 
2243  } // end each region
2244  //cout << "done" << endl;
2245  MTHREAD->DO->printDetailedHV(hVol_byPrd);
2246 
2247 
2248  // Taking care of expected returns here..
2249  // (Changed 25/08/2016 afternoon: expRet{ft,r} are now sum{px}{expRet{ft,px}*fArea_{px}}/fArea{r} and no longer sum{px}{expRet{ft,px}*fArea_{px,ft}}/fArea{r,ft} )
2250  // Also now we report the expReturns by group and by forest, each of which is made only with the best ones within their group
2251 
2252  vector<string> parentFtypes = MTHREAD->MD->getForTypeParents();
2253 
2254  for(uint r2= 0; r2<regIds2.size();r2++){
2255  int regId = regIds2[r2];
2256  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
2257  double totRegionForArea = 0.;
2258  double totSumExpRet = 0.;
2259  vector <double> totSumExpRet_byFTParent(parentFtypes.size(),0.0);
2260  vector <double> totSumExpRet_byFTypes(fTypes.size(),0.0);
2261 
2262  // First computing the sumExpectedReturns..
2263  for (uint p=0;p<regPx.size();p++){
2264  Pixel* px = regPx[p];
2265  //int debug_pxid = px->getID();
2266  double pxForArea = vSum(px->area);
2267  totRegionForArea += pxForArea;
2268  double bestPxExpectedRet = getMax(px->expectedReturnsNotCorrByRa);
2269  for(uint i=0;i<parentFtypes.size();i++){
2270  vector <string> childIds = MTHREAD->MD->getForTypeChilds(parentFtypes[i]);
2271  vector <int> childPos = MTHREAD->MD->getForTypeChilds_pos(parentFtypes[i]);
2272  vector<double> pxExpReturnsByChilds(childPos.size(),0.0);
2273  for(uint j=0;j<childPos.size();j++){
2274  double pxExpReturn_singleFt = px->expectedReturns.at(childPos[j]);
2275  // Manual fix to not have the expected returns of ash within the general "broadL" expected returns.
2276  // To do: remove it after we work on the ash project.. I don't like manual fixes !!!
2277  pxExpReturnsByChilds.at(j) = (childIds.at(j) == "ash") ? 0.0 : pxExpReturn_singleFt;
2278  //pxExpReturnsByChilds.at(j) = pxExpReturn_singleFt;
2279  totSumExpRet_byFTypes.at(childPos[j]) += pxExpReturn_singleFt*pxForArea;
2280  } // end of each ft
     // Parent group takes the best child's return, weighted by the pixel's area.
2281  totSumExpRet_byFTParent[i] += getMax(pxExpReturnsByChilds)*pxForArea;
2282  } // end for each partentFt
2283  totSumExpRet += bestPxExpectedRet * pxForArea;
2284  } // end for each px
2285 
2286  // ..and now computing the expReturns and storing them
2287  for(uint i=0;i<parentFtypes.size();i++){
2288  vector <int> childPos = MTHREAD->MD->getForTypeChilds_pos(parentFtypes[i]);
2289  for(uint j=0;j<childPos.size();j++){
2290  //double debug1 = totSumExpRet_byFTypes.at(childPos[j])/totRegionForArea;
2291  sfd(totSumExpRet_byFTypes.at(childPos[j]),"sumExpReturns",regId,fTypes.at(childPos[j]),"",DATA_NOW, true);
2292  sfd(totSumExpRet_byFTypes.at(childPos[j])/totRegionForArea,"expReturns",regId,fTypes.at(childPos[j]),"",DATA_NOW, true);
2293  } // end of each ft
2294  //double debug2 = totSumExpRet_byFTParent.at(i)/totRegionForArea;
2295  sfd(totSumExpRet_byFTParent.at(i),"sumExpReturns",regId,parentFtypes[i],"",DATA_NOW, true);
2296  sfd(totSumExpRet_byFTParent.at(i)/totRegionForArea,"expReturns",regId,parentFtypes[i],"",DATA_NOW, true);
2297 
2298  } // end for each partentFt
2299  //double debug3 = totSumExpRet/totRegionForArea;
2300  sfd(totSumExpRet,"sumExpReturns",regId,"","",DATA_NOW, true);
2301  sfd(totSumExpRet/totRegionForArea,"expReturns",regId,"","",DATA_NOW, true);
2302 
2303  } // end for each region
2304 
2305  // Computing pathogens share of forest invasion
2306  if(MD->getBoolSetting("usePathogenModule")){
2307  for(uint r2= 0; r2<regIds2.size();r2++){
2308  int regId = regIds2[r2];
2309  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
2310  double totalForArea = 0.0;
2311  double invadedArea = 0.0;
2312  for (uint p=0;p<regPx.size();p++){
2313  Pixel* px = regPx[p];
     // A pixel counts as invaded if any (ft,dc) shows pathogen mortality > 0.
2314  int invaded = 0.0;
2315  for(uint j=0;j<fTypes.size();j++){
2316  for (uint u=0; u<dClasses.size(); u++){
2317  if(px->getPathMortality(fTypes[j],dClasses[u]) > 0){
2318  invaded = 1.0;
2319  }
2320  }
2321  }
2322  totalForArea += vSum(px->area);
2323  invadedArea += vSum(px->area)*invaded;
2324  }
2325  sfd(invadedArea/totalForArea,"totalShareInvadedArea",regId,"","",DATA_NOW, true);
2326  }
2327  } // end we are using path model
2328 
2329 
2330 
2331 
2332 }
2333 /**
2334  * This function call registerHarvesting() (accounts for emissions from for. operations), registerDeathBiomass() (registers new stocks of death biomass),
2335  * registerProducts() (registers new stock of products) and registerTransports() (accounts for emissions from transportation).
2336  *
2337  * It passes to registerProducts():
2338  * - for primary products, the primary products exported out of the country, but not those exported to other regions or used in the region as
2339  * these are assumed to be totally transformed to secondary products;
2340  * - for secondary products, those produced in the region from locally or regionally imported primary product plus those secondary products
2341  * imported from other regions, less those exported to other regions. It doesn't include the secondary products imported from abroad the country.
2342  */
// NOTE(review): the function signature line is missing from this extract; per the
// Doxygen comment above, this is the carbon-accounting driver (presumably
// ModelCoreSpatial::registerCarbonEvents()). It registers, per region: death
// biomass and harvesting (by ft), product stocks (primary exports, secondary
// consumption) and transport emissions, then triggers the end-of-life energy
// substitution of harvested wood products.
2343 void
2345 
2346  //void registerHarvesting(const int & regId, const string & fType, const double & value); ///< register the harvesting of trees -> cumEmittedForOper
2347  //void registerDeathBiomass(const double &value, const int & regId, const string &fType);
2348  //void registerProducts(const double &value, const int & regId, const string &productName);
2349  //void registerTransports(const double &distQ, const int & regId);
2350 
2351  for(uint i=0;i<regIds2.size();i++){
2352  for(uint j=0;j<fTypes.size();j++){
2353  double deathBiomass = gfd("vMort",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
2354  double harvesting = gfd("hV",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
2355  MTHREAD->CBAL->registerDeathBiomass(deathBiomass, regIds2[i], fTypes[j]); // register new stock
2356  MTHREAD->CBAL->registerHarvesting(harvesting, regIds2[i], fTypes[j]); // account for emissions. Added 201500715: it also moves the extra biomass to the death biomass pool
2357  }
2358 
2359  for(uint p=0;p<priProducts.size();p++){
2360  // for the primary products we consider only the exports as the domestic consumption is entirelly transformed in secondary products
2361  double int_exports = gpd("sa",regIds2[i],priProducts[p],DATA_NOW);
2362  MTHREAD->CBAL->registerProducts(int_exports, regIds2[i], priProducts[p]); // register new stock
2363  }
2364  for(uint p=0;p<secProducts.size();p++){
2365  // for the tranformed product we skip those that are imported, hence derived from other forest systems
2366  // but we consider those coming from other regions
2367  double consumption = gpd("dl",regIds2[i],secProducts[p],DATA_NOW); // dl = sl + net regional imports
2368  MTHREAD->CBAL->registerProducts(consumption, regIds2[i], secProducts[p]); // register new stock
2369  }
2370 
2371  }
     // Transport emissions: distance * transported quantity, summed over all
     // destination regions and products within the same level-2 group.
2372  for (uint r1=0;r1<l2r.size();r1++){
2373  for (uint r2=0;r2<l2r[r1].size();r2++){
2374  int rfrom= l2r[r1][r2];
2375  double distQProd = 0.0;
2376  for (uint r3=0;r3<l2r[r1].size();r3++){
2377  int rto = l2r[r1][r3];
2378  double dist = gpd("dist",rfrom,"",DATA_NOW,i2s(rto)); //km
2379  for(uint p=0;p<allProducts.size();p++){
2380  distQProd += dist*gpd("rt",rfrom,allProducts[p],DATA_NOW,i2s(rto)); //km*Mm^3
2381  }
2382  }
2383  MTHREAD->CBAL->registerTransports(distQProd, rfrom);
2384  }
2385  }
2386  MTHREAD->CBAL->HWP_eol2energy(); // used to compute the energy substitution from hwp that reach the end of life and doesn't go to landfil. Previously the energy substitution was computed in registerProducts(), that is at the time when the product was produced.
2387 
2388 }
2389 
2390 /**
2391  * Compute the expectation weighted price based on the ratio of the international (world) price between the future and now.
2392  *
2393  * @param curLocPrice The local current price
2394  * @param worldCurPrice The world current price
2395  * @param worldFutPrice The world future price
2396  * @param sl Supply local
2397  * @param sa Supply abroad
2398  * @param expCoef The expectation coefficient for prices for the agent [0,1]
2399  * @return The expType-averaged local (or weighted) price
2400  */
2401 double
2402 ModelCoreSpatial::computeExpectedPrice(const double & curLocPrice, const double & worldCurPrice, const double & worldFutPrice, const double & sl, const double & sa, const double & expCoef){
2403  double fullExpWPrice = (curLocPrice*(worldFutPrice/worldCurPrice)*sl+worldFutPrice*sa)/(sa+sl);
2404  double curWPrice = (curLocPrice*sl+worldCurPrice*sa)/(sl+sa);
2405  return curWPrice * (1-expCoef) + fullExpWPrice * expCoef;
2406 }
2407 
2408 /**
2409  * It uses volumes from gis data to "move" volumes from one forest type to the other (when called with what="vol"). Then it moves areas
2410  * proportionally and, as dc0 volumes are not defined but areas are, computes, again proportionally, the area in the destination forest types for dc=0.
2411  * It acts on the pix->vol, pix->area and pix->area_l vectors. It also creates/updates the px->values layer map for the area, but it doesn't cache the
2412  * results in forDataMap.
2413  *
2414  * It is called first with parameter what="vol" in initializePixelVolumes() and then with what="area" in initializePixelAreas().
2415  * As we need the original volumes in the area allocation, original_vols is set as a static variable.
2416  *
2417  */
// NOTE(review): the function signature line is missing from this extract; per the
// Doxygen comment above, it takes a string parameter 'what' ("vol" or "area") and
// transfers volumes (and then, proportionally, areas) between forest types using
// the "spInput#vol#<ftOut>#<ftIn>#<dc>" GIS layers.
2418 void
2420  if(!MD->getBoolSetting("useSpExplicitForestTypes")) return;
2421 
2422  int nFTypes = fTypes.size();
2423  int nDC = dClasses.size();
2424  int pxC = 0;
2425 
     // Total pixel count across the level-2 regions (sizes the caches below).
2426  for(uint ir=0;ir<regIds2.size();ir++){
2427  int r2 = regIds2[ir];
2428  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2429  regPx = REG->getMyPixels();
2430  pxC += regPx.size();
2431  }
2432 
     // static: filled during the what=="vol" call, reused by the later
     // what=="area" call (see Doxygen comment above).
2433  static vector<vector<vector<double>>> original_vols(pxC, vector<vector<double>>(nFTypes, vector<double>(nDC, 0.0))); // by px counter, ftype, dc
2434 
2435  if(what=="vol"){
2436  // first, before transfering volumes, saving the original ones..
2437  for(uint i=0;i<fTypes.size();i++){
2438  for (uint u=0; u<dClasses.size(); u++){
2439  int pxC_loc = 0;
2440  for(uint ir=0;ir<regIds2.size();ir++){
2441  int r2 = regIds2[ir];
2442  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2443  regPx = REG->getMyPixels();
2444  for (uint p=0;p<regPx.size();p++){
2445  Pixel* px = regPx[p];
2446  original_vols[pxC_loc][i][u] += px->vol[i][u];
2447  pxC_loc ++;
2448  }
2449  }
2450  }
2451  }
     // Move volumes fti -> fto where a transfer layer exists (capped at the
     // volume actually present in the source ft).
2452  for(uint i=0;i<fTypes.size();i++){
2453  string fti = fTypes[i];
2454  for(uint o=0;o<fTypes.size();o++){
2455  string fto = fTypes[o];
2456  for (uint u=1; u<dClasses.size(); u++){ // first diameter class volumes are computed from the model..
2457  string layerName = "spInput#vol#"+fto+"#"+fti+"#"+i2s(u);
2458  if (MTHREAD->GIS->layerExist(layerName)){
2459  for(uint ir=0;ir<regIds2.size();ir++){
2460  int r2 = regIds2[ir];
2461  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2462  regPx = REG->getMyPixels();
2463  for (uint p=0;p<regPx.size();p++){
2464  Pixel* px = regPx[p];
2465  double vol_transfer = min(px->getDoubleValue(layerName,true)/1000000,px->vol[i][u]) ; // Vol in the layer are in m^3, in the model in Mm^3
2466  px->vol[i][u] -= vol_transfer;
2467  px->vol[o][u] += vol_transfer;
2468  }
2469  }
2470  }
2471  }
2472  }
2473  }
2474  }
2475 
2476  if(what=="area"){
2477  /**
2478  Allocate area proportionally to volumes (see file test_proportional_computation_of_areas_from_volumes.ods)
2479  Example:
2480  FtIn FtOut Vtrasfer
2481  con ash 0.2
2482  brHf ash 0.1
2483  brCopp ash 0.3
2484  con oak 0.3
2485  brHf oak 0.2
2486  brCopp oak 0.1
2487 
2488  Vorig Aorig Vnew Anew
2489  con 10 30 9.5 28.5 Aorig-Aorig*(Vtrasfer1/Vorig)-Aorig(Vtrasfer2/Vorig)
2490  brHf 5 20 4.7 18.8
2491  brCopp 2 20 1.6 16
2492  ash 0 0 0.6 4 Aorig1*Vtrasfer1/(Vorig1)+Aorig2*Vtrasfer2/(Vorig2)+...
2493  oak 0 0 0.6 2.7
2494  70 70
2495  */
2496  // first, before transfering areas, saving the original ones (we already saved the vols in the what="vol" section, that is called before this one)..
2497  vector<vector<vector<double>>> original_areas(pxC, vector<vector<double>>(nFTypes, vector<double>(nDC, 0.0))); // by px counter, ftype, dc
2498  for(uint i=0;i<fTypes.size();i++){
2499  for (uint u=0; u<dClasses.size(); u++){
2500  int pxC_loc = 0;
2501  for(uint ir=0;ir<regIds2.size();ir++){
2502  int r2 = regIds2[ir];
2503  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2504  regPx = REG->getMyPixels();
2505  for (uint p=0;p<regPx.size();p++){
2506  Pixel* px = regPx[p];
2507  original_areas[pxC_loc][i][u] += px->area_l[i][u];
2508  pxC_loc ++;
2509  }
2510  }
2511  }
2512  }
2513 
2514 
2515  // transferred areas ordered by pxcounter, i and then o ftype. Used to then repart the 0 diameter class..
2516  vector<vector<vector<double>>> transferred_areas(pxC, vector<vector<double>>(nFTypes, vector<double>(nFTypes, 0.0))); // initialize a 3d vector of nFTypes zeros.
2517 
2518  for(uint i=0;i<fTypes.size();i++){
2519  string fti = fTypes[i];
2520  for(uint o=0;o<fTypes.size();o++){
2521  string fto = fTypes[o];
2522  for (uint u=1; u<dClasses.size(); u++){ // first diameter class area is comuted proportionally..
2523  string layerName = "spInput#vol#"+fto+"#"+fti+"#"+i2s(u);
2524  if (MTHREAD->GIS->layerExist(layerName)){
2525  int pxC_loc = 0;
2526  for(uint ir=0;ir<regIds2.size();ir++){
2527  int r2 = regIds2[ir];
2528  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2529  regPx = REG->getMyPixels();
2530  for (uint p=0;p<regPx.size();p++){
2531  Pixel* px = regPx[p];
2532  double vol_i_orig = original_vols[pxC_loc][i][u];
2533  double vol_transfer = vol_i_orig?px->getDoubleValue(layerName,true)/1000000:0.0; // Vol in the layer are in m^3, in the model in Mm^3
2534  double area_i_orig = original_areas[pxC_loc][i][u];
     // area moves with the same share as the volume: A_t = A_orig * V_t / V_orig
2535  double area_transfer = vol_i_orig?area_i_orig*vol_transfer/vol_i_orig:0.0;
2536  px->area_l[i][u] -= area_transfer;
2537  px->area[i][u] = px->area_l[i][u];
2538  px->area_l[o][u] += area_transfer;
2539  px->area[o][u] = px->area_l[o][u];
2540  transferred_areas[pxC_loc][i][o] += area_transfer;
2541  pxC_loc ++;
2542  }
2543  }
2544  }
2545  }
2546  }
2547  }
2548 
2549  // Moving the area in the 0 diameter class, for which no info is normally available..
     // NOTE(review): pxC_loc is declared double but used as a vector index below;
     // it should presumably be an int like the other pxC_loc counters — confirm.
2550  double pxC_loc = 0;
2551  for(uint ir=0;ir<regIds2.size();ir++){
2552  int r2 = regIds2[ir];
2553  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2554  regPx = REG->getMyPixels();
2555  for (uint p=0;p<regPx.size();p++){
2556  Pixel* px = regPx[p];
2557  for(uint i=0;i<fTypes.size();i++){
2558  for(uint o=0;o<fTypes.size();o++){
2559  double area_i_orig = 0.0;
2560  for (uint u=1; u<dClasses.size(); u++){ // we want to skip the 0 diameter class, this is why we don't simply use vSum()..
2561  area_i_orig += original_areas[pxC_loc][i][u];
2562  }
2563  double area_transfer_u0 = area_i_orig?original_areas[pxC_loc][i][0]*(transferred_areas[pxC_loc][i][o]/area_i_orig):0.0;
2564  px->area_l[i][0] -= area_transfer_u0 ;
2565  px->area[i][0] = px->area_l[i][0];
2566  px->area_l[o][0] += area_transfer_u0 ; // bug corrected 20151130: it was 0 instead of o (output) !!
     // NOTE(review): the right-hand side below reads area_l[0][0]; by symmetry
     // with the line above (and with the u>0 loop, which assigns
     // area[o][u] = area_l[o][u]) it likely should be area_l[o][0] — confirm.
2567  px->area[o][0] = px->area_l[0][0]; // bug corrected 20151130: it was 0 instead of o (output) !!
2568  }
2569  }
2570  pxC_loc++;
2571  }
2572  }
2573 
2574  // Alligning the area memorised in the px layers to the new areas of the ft..
2575  for(uint i=0;i<fTypes.size();i++){
2576  string fti_id = fTypes[i];
2577  forType* fti = MTHREAD->MD->getForType(fti_id);
2578  int ft_memType = fti->memType;
2579  string ft_layerName = fti->forLayer;
2580  //if(ft_memType==3){
2581  // MTHREAD->GIS->addLayer(ft_layerName,ft_layerName,false,true); //20151130: no needed as we already added it in applyForestReclassification (yes, odd, as memory type 3 layerrs do not have any reclassification rule associated, but if I don't add the layer at that time I got other errors)
2582  // }
2583  if(ft_memType==3 ||ft_memType==2){
2584  for(uint ir=0;ir<regIds2.size();ir++){
2585  int r2 = regIds2[ir];
2586  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2587  regPx = REG->getMyPixels();
2588  for (uint p=0;p<regPx.size();p++){
2589  Pixel* px = regPx[p];
2590  double area_px = vSum(px->area[i]);
     // model areas in ha, layer stores m^2 -> *10000
2591  px->changeValue(ft_layerName,area_px*10000);
2592  }
2593  }
2594  }
2595  }
2596  } // end if what is area
2597 }
2598 
// NOTE(review): the function signature line is missing from this extract;
// presumably ModelCoreSpatial::printDebugInitRegionalValues().
// One-off debugging aid: dumps second-year inventory (in), expected supply (st)
// and local/abroad supply (sl/sa) per region and primary product to stdout, then
// terminates the program (deliberate exit(0)).
// Fix: the column header listed "Sa\tSl" while the values are printed in the
// order inv, st, sl, sa — the last two labels were swapped. Header corrected to
// "Sl\tSa" to match the printed order; everything else unchanged.
2599 void
2601  // Print debug stats on inventory and supplies in each region..
2602  cout << "Printing debug information on initial regional inventories and supplies.." << endl;
2603  cout << "Reg\tProduct\t\tInv\tSt\tSl\tSa" << endl;
2604  for(uint r1=0;r1<l2r.size();r1++){
2605  for(uint r2c=0;r2c<l2r[r1].size();r2c++){
2606  for(uint p=0;p<priProducts.size();p++){
2607  int r2 = l2r[r1][r2c];
2608  double inv = gpd("in",r2,priProducts[p],secondYear);
2609  double st = gpd("st",r2,priProducts[p],secondYear);
2610  double sl = gpd("sl",r2,priProducts[p],secondYear);
2611  double sa = gpd("sa",r2,priProducts[p],secondYear);
2612  cout << r2 << "\t" << priProducts[p] << "\t\t" << inv << "\t" << st << "\t" << sl << "\t" << sa << endl;
2613  }
2614  }
2615  } // end of r1 region
     // Hard stop: this routine exists only to inspect the initial state.
2616  exit(0);
2617 
2618 }
2619 
2620 /**
2621  * @brief ModelCoreSpatial::allocateHarvesting
2622  * @param total_st vector of total supply by primary products
2623  * @return a vector of the remaining supply that goes allocated to alive timber (that is, to harvesting)
2624  *
2625  * The algorithm is such that it loops the deathTimberInventory map for each year (newer to older), dc (higher to smaller) and ft.
2626  * It computes the primary products allocable from that combination and allocates the cell amount to decrease the total_st of those products
2627  * in a proportional way to what still remains of the allocable products.
2628  *
2629  * It is called in the runMarketModule() function.
2630  *
2631  */
2632 
2633 vector <double>
2634 ModelCoreSpatial::allocateHarvesting(vector<double> total_st, const int &regId){
2635  if(!MD->getBoolSetting("useDeathTimber",DATA_NOW)) return total_st;
2636  //vector <double> st_fromHarv(priProducts.size(),0.);
2637  //map<iisskey, double > * deathTimberInventory= MD->getDeathTimberInventory();
2638  int maxYears = MD->getMaxYearUsableDeathTimber();
2639  int currentYear = MTHREAD->SCD->getYear();
2640  for(uint y = currentYear-1; y>currentYear-1-maxYears; y--){
2641  for (int u = dClasses.size()-1; u>=0; u--){ // I need to specify u as an integer !
2642  string dc = dClasses.at(u);
2643  for (uint f=0; f<fTypes.size(); f++){
2644  string ft = fTypes[f];
2645  vector<int>allocableProducts = MD->getAllocableProductIdsFromDeathTimber(regId, ft, dc, y, currentYear-1);
2646  iisskey key(y,regId,ft,dc);
2647  double deathTimber = MD->deathTimberInventory_get(key);
2648  double sum_total_st_allocable = 0;
2649  // Computing shares/weights or remaining st to allcate
2650  for(uint ap=0;ap<allocableProducts.size();ap++){
2651  sum_total_st_allocable += total_st.at(allocableProducts[ap]);
2652  }
2653  for(uint ap=0;ap<allocableProducts.size();ap++){
2654  double allocableShare = sum_total_st_allocable?total_st.at(allocableProducts[ap])/sum_total_st_allocable:0.0;
2655  double allocated = min(total_st[allocableProducts[ap]],deathTimber*allocableShare);
2656  MD->deathTimberInventory_incrOrAdd(key,-allocated);
2657  total_st[allocableProducts[ap]] -= allocated;
2658  }
2659  }
2660  }
2661  }
2662  return total_st;
2663 }
2664 
2665 
2666 /**
2667  * This function has two tasks:
2668  * 1) compute the policy balances (- -> public cost, subsidy; + -> public revenue, tax) for pol_mktDirInt_s, pol_mktDirInt_d, pol_trSub and pol_tcSub
2669  * (pol_fiSub are done directly in the runManagementModule() function)
2670  * 2) compute the producer/consumer surpluses
2671  */
2672 void
2674  //int thisYear = MTHREAD->SCD->getYear();
2675  int previousYear = MTHREAD->SCD->getYear()-1;
2676 
2677  for(uint r2=0; r2<regIds2.size();r2++){
2678  int regId = regIds2[r2];
2679  ModelRegion* reg = MD->getRegion(regId);
2680 
2681  for(uint p=0;p<priProducts.size();p++){
2682  double pol_mktDirInt_s = gpd("pol_mktDirInt_s",regId,priProducts[p]);
2683  double pol_mktDirInt_s_1 = gpd("pol_mktDirInt_s",regId,priProducts[p],previousYear);
2684  double sigma = gpd("sigma",regId,priProducts[p]);
2685  double in = gpd("in",regId,priProducts[p]);
2686  double in_1 = gpd("in",regId,priProducts[p], previousYear);
2687  double pc = gpd("pc",regId,priProducts[p]);
2688  double pc_1 = gpd("pc",regId,priProducts[p], previousYear);
2689  double sc = gpd("sc",regId,priProducts[p]);
2690  double sc_1 = gpd("sc",regId,priProducts[p],previousYear);
2691  double gamma_incr = gpd("gamma_incr",regId,priProducts[p]); // elast supply to increasing stocks
2692  double gamma_decr = gpd("gamma_decr",regId,priProducts[p]); // elast supply to decreasing stocks
2693 
2694  double gamma = in>in_1 ? gamma_incr: gamma_decr;
2695  double polBal_mktDirInt_s = pc * (1-pol_mktDirInt_s) * sc;
2696  double surplus_prod = pc * sc -
2697  pc_1
2698  * pol_mktDirInt_s_1
2699  * pow(pol_mktDirInt_s,-1)
2700  * pow(sc_1,-1/sigma)
2701  * pow(in_1/in,gamma/sigma)
2702  * (sigma/(sigma+1))
2703  * pow(sc,(sigma+1)/sigma);
2704 
2705  spd(polBal_mktDirInt_s,"polBal_mktDirInt_s",regId,priProducts[p],DATA_NOW,true); // M€
2706  spd(surplus_prod,"surplus_prod",regId,priProducts[p],DATA_NOW,true);
2707  }
2708  for(uint p=0;p<secProducts.size();p++){
2709  double pol_mktDirInt_d = gpd("pol_mktDirInt_d",regId,secProducts[p]);
2710  double pol_mktDirInt_d_1 = gpd("pol_mktDirInt_d",regId,secProducts[p],previousYear);
2711  double sigma = gpd("sigma",regId,secProducts[p]);
2712  double pol_trSub = gpd("pol_trSub",regId,secProducts[p]);
2713  double pc = gpd("pc",regId,secProducts[p]);
2714  double pc_1 = gpd("pc",regId,secProducts[p],previousYear);
2715  double dc = gpd("dc",regId,secProducts[p]);
2716  double dc_1 = gpd("dc",regId,secProducts[p],previousYear);
2717  double dc0 = gpd("dc",regId,secProducts[p],secondYear);
2718  double m = gpd("m",regId,secProducts[p]);
2719  double sl = gpd("sl",regId,secProducts[p]); // sl because dl include also the production from byproduct that has not subsides in the model
2720 
2721  double polBal_mktDirInt_d = pc * (pol_mktDirInt_d - 1) * dc;
2722  double polBal_trSub = m * (pol_trSub-1) * sl;
2723  double trs = 0.5; // 0.1: we consider 90 percent of the demand integral
2724  double surplus_cons = pc_1
2725  * pol_mktDirInt_d_1
2726  * pow(dc_1,-1/sigma)
2727  * pow(pol_mktDirInt_d,-1)
2728  * (sigma/(sigma+1))
2729  * (pow(dc,(sigma+1)/sigma) - pow(trs*dc0,(sigma+1)/sigma))
2730  - (dc - trs * dc0) * pc ;
2731 
2732  spd(polBal_mktDirInt_d,"polBal_mktDirInt_d",regId,secProducts[p],DATA_NOW,true); // M€
2733  spd(polBal_trSub,"polBal_trSub",regId,secProducts[p],DATA_NOW,true); // M€
2734  spd(surplus_cons,"surplus_cons",regId,secProducts[p],DATA_NOW,true); // M€
2735  }
2736 
2737  for(uint p=0;p<allProducts.size();p++){
2738  double pol_tcSub = gpd("pol_tcSub",regId,allProducts[p]);
2739  double polBal_tcSub = 0;
2740  vector<ModelRegion*> siblings = reg->getSiblings();
2741  for(uint rTo=0;rTo<siblings.size();rTo++){
2742  int regId2 = siblings[rTo]->getRegId();
2743  double ct = gpd("ct",regId,allProducts[p],DATA_NOW,i2s(regId2));
2744  double rt = gpd("rt",regId,allProducts[p],DATA_NOW,i2s(regId2));
2745  polBal_tcSub += ct*(pol_tcSub-1) * rt;
2746  }
2747  spd(polBal_tcSub,"polBal_tcSub",regId,allProducts[p],DATA_NOW,true); // M€
2748  }
2749 
2750  }
2751 
2752 }
2753 
2754 /**
2755  * Return the average age using cumTp vector
2756  */
2757 double
2759  if(dc == dClasses.size()-1) { return px->cumTp[ft][dc] * 1.2;}
2760  else {return (px->cumTp[ft][dc]+px->cumTp[ft][dc+1])/2; }
2761 }
2762 
2763 
2764 
2765 
2766 // added for carbon project
2767 
2768 double
2769 ModelCoreSpatial::getVHaByYear(const Pixel* px, const int& ft, const int& year, const double& extraBiomass_ratio, const int& regId ) const {
2770  // This function computes and send back the expected volume/ha in a forest type after a specific amount of years
2771  // It uses expected volumes/ha at each diameter class, and makes a linear interpolation between the corresponding times of passage
2772  // Ancient version was a "stairs" function of time, now it is a succession of linear approximations
2773 
2774  double vHa = 0;
2775  int thisYear = MTHREAD->SCD->getYear();
2776 
2777  // This loop goes through all diameter classes to find the last one that was reached at year tested
2778  for (int u = 0; u<dClasses.size(); u++){
2779  // We retrieve the expected time of passage for the current diameter class
2780  // and we compare it to the year tested
2781  double cumTP = px->cumTp_exp[ft][u];
2782  if (year>=cumTP) {
2783  // Forest has already reached this class,
2784  // We do nothing and go to the next one
2785  }
2786  else {// Forest has not reached this class yet, we stop here
2787  // Assumption : Volume growth is a linear approximation between volume at previous DC and at current DC
2788  // There would be a problem if "year" was negative, but it should not happen since year starts at 0
2789  double cumTP_plus = px->cumTp_exp[ft][u];
2790  double cumTP_minus = px->cumTp_exp[ft][u-1];
2791  double volHa_plus = px->vHa_exp[ft][u];
2792  double volHa_minus = px->vHa_exp[ft][u-1];
2793  double slope = (volHa_plus - volHa_minus)/(cumTP_plus - cumTP_minus);
2794 
2795  vHa = (volHa_minus + (year-cumTP_minus)*slope)*(1+extraBiomass_ratio);
2796  return vHa;
2797  }
2798  }
2799  // if we arrive here, we have exited the loop
2800  // and the forest has reached the last diameter class
2801  int dc = dClasses.size()-1; // index of the last diameter class
2802  double mortCoeff = gfd("mort",regId,fTypes[ft],dClasses[dc],thisYear); // mortality rate of the last diameter class
2803  double cumTP_max = px-> cumTp_exp[ft][dc]; // cumulative TP to reach last diameter class
2804  double time_elapsed = year - cumTP_max; // time since the last class was reached
2805  double volHa_max = px->cumTp_exp[ft][dc]; // volume/ha when the last class was reached
2806 
2807  // Now we compute the volume/ha at the year tested
2808  // Assumption: trees don't grow anymore but slowly die according to mortality rate
2809  vHa = volHa_max*pow(1-mortCoeff,time_elapsed)*(1+extraBiomass_ratio);
2810  return vHa;
2811 } // end of function
2812 
2813 
2814 
2815 /** ANCIENT VERSION OF FUNCTION. VOLUME WAS A STAIRS FUNCTION OF TIME
2816  * Return the Volume/ha in a forest after a given number of year after planting, for a specific forest type
2817 
2818 double
2819 ModelCoreSpatial::getVHaByYear(const Pixel* px, const int& ft, const int& year, const double& extraBiomass_ratio) const {
2820  double vHa = 0;
2821  //int regId = px->getMyRegion()->getRegId();
2822  //double extraBiomass_ratio = gfd("extraBiomass_ratio",regId,fTypes[ft],"",DATA_NOW);
2823 
2824  for (int u = 0; u<dClasses.size(); u++){
2825  double cumTP_exp = px->cumTp_exp[ft][u];
2826  if (year>=cumTP_exp){
2827  vHa = px->vHa_exp[ft][u]*(1+extraBiomass_ratio);
2828  } else {
2829  return vHa;
2830  }
2831  }
2832  return vHa;
2833 }
2834 */
void updateImage(string layerName_h)
Add one layer to the system.
Definition: Gis.cpp:700
vector< vector< double > > deltaArea
Definition: Pixel.h:108
void setSpModifier(const double &value, const int &ftindex)
Definition: Pixel.h:87
Class to provide a simple integer-integer-string-string key in std maps.
Definition: BaseClass.h:213
void advanceYear()
Definition: Scheduler.h:51
int getPos(const K &element, const vector< K > &v, const int &msgCode_h=MSG_CRITICAL_ERROR)
Definition: BaseClass.h:421
Print an ERROR message, but don't stop the model.
Definition: BaseClass.h:61
The required data is for the current year.
Definition: BaseClass.h:73
void runBiologicalModule()
computes hV, hArea and new vol at end of year
vector< int > getAllocableProductIdsFromDeathTimber(const int &regId_h, const string &ft, const string &dc, const int &harvesting_year, int request_year=DATA_NOW)
Returns the ids of the primary products that is possible to obtain using the timber recorded death in...
Definition: ModelData.cpp:2105
double getArea(const string &fType_h, const string &dClass_h)
Get area by ft and dc (from pixel->area matrix)
void initialiseDeathBiomassStocks(const vector< double > &deathByFt, const int &regId)
Initialises the stocks of death biomass for the avgLive_* years before the simulation starts...
Definition: Carbon.cpp:169
int getIntSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1105
vector< vector< int > > l2r
vector< vector< double > > area
Definition: Pixel.h:107
vector< string > dClasses
vector< string > getForTypeParents()
Definition: ModelData.cpp:122
vector< vector< double > > cumAlive_exp
This is the expected version of cumAlive, used for calculating profits.
Definition: Pixel.h:139
bool getBoolSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1117
void runManagementModule()
computes regArea and expectedReturns
Do not actually output any message.
Definition: BaseClass.h:57
void initializePixelVolumes()
distribuite regional exogenous volumes to pixel volumes using corine land cover area as weight ...
vector< vector< int > > createCombinationsVector(const int &nItems)
Return a vector containing any possible combination of nItems items (including any possible subset)...
Definition: ModelData.cpp:2055
vector< Pixel * > regPx
void sumRegionalForData()
computes vol, hV, harvestedArea, regArea and expReturns at reg level from the pixel level ...
Forest types (struct)
Definition: ModelData.h:293
void spd(const double &value_h, const string &type_h, const int &regId_h, const string &prodId_h, const int &year=DATA_NOW, const bool &allowCreate=false, const string &freeDim_h="") const
string i2s(const int &int_h) const
integer to string conversion
Definition: BaseClass.cpp:219
ThreadManager * MTHREAD
Pointer to the Thread manager.
Definition: BaseClass.h:467
K getMax(const vector< K > &v)
Returns the value of the maximum element in the vector (the last one in case of multiple equivalent m...
Definition: BaseClass.h:391
double getAvailableDeathTimber(const vector< string > &primProd_h, int regId_h, int year_h)
Returns the timber available for a given set of primary products as stored in the deathTimberInventor...
Definition: ModelData.cpp:2076
bool layerExist(const string &layerName_h, bool exactMatch=true) const
Return a pointer to a layer given its name.
Definition: Gis.cpp:552
void changeValue(const string &layerName_h, const double &value_h, const bool &setNoValueForZero=false)
Change the value of an existing layerMTHREAD->GIS->pack(parName, forName, dClass, year)...
Definition: Pixel.cpp:135
double getValue(string layerName, int op=OP_SUM)
return the values of its own pixels for the specified layer. Possible operations: OP_SUM or OP_AVG ...
double calculateAnnualisedEquivalent(const double &amount_h, const int &years_h, const double &ir) const
Calculate the annual equivalent flow.
Definition: ModelData.cpp:1961
ModelData * MD
the model data object
Definition: ThreadManager.h:72
void resetMapValues(map< K, V > &mymap, const V &value)
Reset all values stored in a map to the specified one.
Definition: BaseClass.h:341
void loadExogenousForestLayers(const string &what)
Set pixel volumes (what="vol") OR areas (what="area") by specific forest types as defined in gis laye...
int memType
Definition: ModelData.h:296
double expType
Sampling derived expectation types of this agent (forest bilogical parameters: growth, mortality)
Definition: Pixel.h:143
vector< double > avalCoef
Availability (of wood resources) coefficient. A [0,1] coefficient (new: by forest type) that reduces ...
Definition: Pixel.h:146
vector< string > priProducts
double getAvailableAliveTimber(const vector< string > &primProd_h, int regId_h)
Returns the timber available for a given set of primary products as stored in the px->vol_l vector...
Definition: ModelData.cpp:2124
vector< vector< double > > mort
Definition: Pixel.h:132
void printOptLog(bool optimal, int &nIterations, double &obj)
Definition: Output.cpp:829
Scheduler * SCD
the scheduler object (simulation-loops scheduler)
Definition: ThreadManager.h:75
vector< vector< double > > vMort
Definition: Pixel.h:118
vector< T > positionsToContent(const vector< T > &vector_h, const vector< int > &positions)
Return a vector of content from a vector and a vector of positions (int)
Definition: BaseClass.h:359
vector< vector< double > > vol_l
store the volumes of the previous year
Definition: Pixel.h:128
void msgOut(const int &msgCode_h, const string &msg_h, const bool &refreshGUI_h=true) const
Overloaded function to print the output log.
Definition: BaseClass.cpp:50
vector< double > expectedAnnualIncome_carbon
Definition: Pixel.h:123
Gis * GIS
GIS information and methods.
Definition: ThreadManager.h:73
Thread manager. Responsable to manage the main thread and "speak" with the GUI.
Definition: ThreadManager.h:65
void incrMapValue(map< K, V > &mymap, const K &key, const V &value, const int &error_level=MSG_CRITICAL_ERROR)
Increments a value stored in a map of the specified value, given the key.
Definition: BaseClass.h:312
vector< string > getStringVectorSetting(const string &name_h, int reg=WORLD) const
Definition: ModelData.cpp:1129
Perform a SUM operation.
Definition: BaseClass.h:77
void resetPixelValues()
swap volumes->lagged_volumes and reset the other pixel vectors
double deathTimberInventory_get(const iisskey &thekey)
Definition: ModelData.h:197
vector< vector< double > > vHa_exp
This is the expected version of vHa, used for calculating profits.
Definition: Pixel.h:138
vector< string > pDClasses
vector< double > initialDc0Area
Definition: Pixel.h:109
vector< vector< double > > cumAlive
Cumulative prob of remaining alive at beginnin of a given diam class.
Definition: Pixel.h:136
vector< int > getForTypeChilds_pos(const string &forTypeId_h, bool all=false)
Definition: ModelData.cpp:109
int getMaxPos(const vector< K > &v)
Returns the position of the maximum element in the vector (the last one in case of multiple equivalen...
Definition: BaseClass.h:383
void computeInventary()
in=f(vol_t-1)
vector< vector< double > > vHa
Volume at hectar by each diameter class [m^3/ha].
Definition: Pixel.h:135
vector< double > inResByAnyCombination
Vector of inventory resource for each possible combination of primary products. This store both alive...
Definition: ModelRegion.h:86
string getStringSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1113
void runMarketModule()
computes st (supply total) and pw (weighted price). Optimisation inside.
void registerTransports(const double &distQ, const int &regId)
Registers the quantities emitted by transport of wood FROM a given region.
Definition: Carbon.cpp:277
int getNForTypesChilds(const string &forTypeId_h)
Definition: ModelData.cpp:135
void cacheSettings()
just cache exogenous settings from ModelData
Print an error message and stop the model.
Definition: BaseClass.h:62
void registerDeathBiomass(const double &value, const int &regId, const string &fType)
Registers the "death" of a given amount of biomass, storing it in the deathBiomass map...
Definition: Carbon.cpp:243
double getSTData(const string &parName, const string &forName, int year, const string &d2="", double naToReturn=RETNA)
Definition: Pixel.cpp:246
Output * DO
data output
Definition: ThreadManager.h:76
vector< int > optDcChosen
Definition: Pixel.h:122
double getDoubleValue(const string &layerName_h, const bool &returnZeroForNoValue=false) const
Return the value for a specific layer.
Definition: Pixel.cpp:158
string d2s(const double &double_h) const
double to string conversion
Definition: BaseClass.cpp:229
vector< double > expectedAnnualIncome_timber
Definition: Pixel.h:124
const double getForData(const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW)
Definition: ModelData.cpp:1281
void updateOtherMapData()
update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
int vSum(const vector< int > &vector_h) const
Definition: BaseClass.h:276
bool getTempBool()
Definition: ModelData.h:145
int getYear()
Definition: Scheduler.h:49
void initialiseDeathTimber()
Set deathTimberInventory to zero for the previous years (under the hypothesis that we don't have advan...
double getPastRegArea(const int &ft_idx, const int &year)
Definition: Pixel.cpp:390
vector< string > fTypes
Pixel-level class.
Definition: Pixel.h:47
vector< vector< double > > hProductivity
Definition: Pixel.h:112
void initialiseProductsStocks(const vector< double > &qByProduct, const int &regId)
Initialises the stocks of products for the avgLive_* years before the simulation starts.
Definition: Carbon.cpp:203
void setTempBool(bool tempBool_h)
Definition: ModelData.h:146
vector< vector< double > > vol
Definition: Pixel.h:90
double getVHaByYear(const Pixel *px, const int &ft, const int &year, const double &extraBiomass_ratio, const int &regId) const
return the Volume/ha in a forest after a given number of year after planting, for a specific forest t...
double getMultiplier(const string &multiplierName, const string &forName, int year=DATA_NOW)
Definition: Pixel.cpp:184
vector< string > allProducts
vector< string > secProducts
vector< vector< double > > cumTp_exp
This is the expected version of cumTp, used for calculating profits.
Definition: Pixel.h:137
double expTypePrices
Sampling derived expectation types of this agent (prices)
Definition: Pixel.h:144
void registerHarvesting(const double &value, const int &regId, const string &fType)
Registers the harvesting of trees increasing the value of cumEmittedForOper.
Definition: Carbon.cpp:225
Print a debug message, normally filtered out.
Definition: BaseClass.h:58
Ipopt::SmartPtr< Ipopt::TNLP > OPT
Market optimisation.
Definition: ThreadManager.h:81
Carbon * CBAL
Module for the Carbon Balance.
Definition: ThreadManager.h:79
vector< vector< vector< double > > > hVol_byPrd
Definition: Pixel.h:113
Print a WARNING message.
Definition: BaseClass.h:60
vector< vector< double > > area_l
store the areas of the previous year
Definition: Pixel.h:129
vector< Pixel * > getAllPlotsByRegion(ModelRegion &region_h, bool shuffle=false)
Return the vector of all plots by a specific region (main region or subregion), optionally shuffled;...
Definition: Gis.cpp:855
string getForTypeParentId(const string &forTypeId_h)
Definition: ModelData.cpp:90
vector< string > getForTypeIds(bool all=false)
By default it doesn't return forTypes used only as input.
Definition: ModelData.cpp:430
double getSd(const vector< K > &v, bool sample=true)
Definition: BaseClass.h:408
double gpd(const string &type_h, const int &regId_h, const string &prodId_h, const int &year=DATA_NOW, const string &freeDim_h="") const
map< K, V > vectorToMap(const vector< K > &keys, const V &value=0.0)
Returns a map built using the given vector and the given (scalar) value as keys/values pairs...
Definition: BaseClass.h:349
vector< int > optFtChosen
Definition: Pixel.h:121
vector< int > optDc
Definition: Pixel.h:120
vector< double > expectedReturns
Definition: Pixel.h:119
V findMap(const map< K, V > &mymap, const K &key, const int &error_level=MSG_CRITICAL_ERROR, const V &notFoundValue=numeric_limits< V >::min()) const
Lookup a map for a value. Return the value starting from the key.
Definition: BaseClass.h:286
vector< vector< double > > tp
Definition: Pixel.h:133
double getPathMortality(const string &forType, const string &dC, int year=DATA_NOW)
Return the INCREASED mortality due to pathogen presence for a given ft and dc in a certain year (defa...
Definition: Pixel.cpp:329
ModelCoreSpatial(ThreadManager *MTHREAD_h)
double getAvgAgeByDc(Pixel *px, int ft, int dc)
return the average age of a tree at a given diameter class, using the cumTp vector ...
int getErrorLevel()
Definition: ModelData.h:144
void swap(const int &swap_what)
Assign to the delayed value the current values, e.g. vol_l = vol.
Definition: Pixel.cpp:422
string forLayer
Definition: ModelData.h:297
vector< ModelRegion * > getSiblings(bool excludeResidual=true)
Return a vector of pointers to the siblings regions.
Definition: ModelRegion.cpp:85
void printDetailedHV(map< tr1::array< string, 4 >, double > hVol_byPrd)
Definition: Output.cpp:1147
vector< double > expectedReturnsNotCorrByRa
by ft. Attenction, reported expReturns at "forest" level (compared with those at forest type level) d...
Definition: Pixel.h:126
vector< vector< double > > hVol
Definition: Pixel.h:111
void incrOrAddMapValue(map< K, V > &mymap, const K &key, const V &value)
Increments a value stored in a map of the specified value, given the key.
Definition: BaseClass.h:325
void initialiseEmissionCounters()
Initialises the emission counters to zero.
Definition: Carbon.cpp:158
void setErrorLevel(int errorLevel_h)
Definition: ModelData.h:143
void initMarketModule()
computes st and pw for second year and several needed-only-at-t0-vars for the market module ...
vector< int > getRegionIds(int level_h, bool excludeResidual=true)
Definition: ModelData.cpp:366
double computeExpectedPrice(const double &curLocPrice, const double &worldCurPrice, const double &worldFutPrice, const double &sl, const double &sa, const double &expCoef)
Compute weighted expected price for a given product.
void print(bool earlyPrint=true)
Print output. If earlyPrinting it doesn&#39;t print some stuff for which we don&#39;t yet have values...
Definition: Output.cpp:426
void HWP_eol2energy()
Computes the energy substitution for the quota of HWP that reaches end of life and doesn't go to landf...
Definition: Carbon.cpp:288
void initializePixelArea()
compute px->area for each ft and dc
vector< int > regIds2
double getDoubleSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1109
vector< vector< double > > cumTp
This is time of passage to REACH a diameter class (while the exogenous tp by diameter class is the ti...
Definition: Pixel.h:134
vector< vector< double > > hArea
Definition: Pixel.h:110
void sfd(const double &value_h, const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW, const bool &allowCreate=false) const
string getRegSName() const
Definition: ModelRegion.h:67
vector< double > vReg
Definition: Pixel.h:117
double gfd(const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW) const
bool app(const string &prod_h, const string &forType_h, const string &dClass_h) const
vector< vector< double > > beta
Definition: Pixel.h:131
vector< Pixel * > getMyPixels()
Definition: ModelRegion.h:86
Print an INFO message.
Definition: BaseClass.h:59
void assignSpMultiplierPropToVols()
ModelCoreSpatial::assignSpMultiplierPropToVols assigns the spatial multiplier (used in the time of re...
ModelRegion * getRegion(int regId_h)
Definition: ModelData.cpp:346
void cacheDynamicSettings()
cache settings that may change with time
void initialiseCarbonModule()
call initialiseDeathBiomassStocks(), initialiseProductsStocks() and initialiseEmissionCounters() ...
void cachePixelExogenousData()
computes pixel level tp, meta and mort
double getAvg(const vector< K > &v)
Returns the average of the elements in the vector.
Definition: BaseClass.h:399
#define DIAM_ALL
All diameter classes.
Definition: BaseClass.h:157
void updateMapAreas()
computes forArea_{ft}
void deathTimberInventory_incrOrAdd(const iisskey &thekey, double value_h)
Definition: ModelData.h:195
void registerCarbonEvents()
call registerHarvesting(), registerDeathBiomass(), registerProducts() and registerTransports() ...
vector< string > getForTypeChilds(const string &forTypeId_h)
Definition: ModelData.cpp:98
void computeCumulativeData()
computes cumTp_exp, vHa_exp, vHa
void registerProducts(const double &value, const int &regId, const string &productName)
Registers the production of a given amount of products, storing it in the products maps...
Definition: Carbon.cpp:257
double getID() const
Definition: Pixel.h:67
void printDebugInitRegionalValues()
print initial inv, st, sl and sa in each region
void computeEconomicBalances()
compute the policy balances (- -> costs; + -> rev) and the producer/consumer surpluses ...
forType * getForType(int position)
Definition: ModelData.h:128
map< int, vector< double > > regArea
Definition: Pixel.h:114
const int getMaxYearUsableDeathTimber(const string &prod_h, const string &forType_h, const string &dClass_h)
Definition: ModelData.cpp:468
vector< double > allocateHarvesting(vector< double > total_st, const int &regId)
Using the deathTimberInventory map, this function allocate the total st in st from death timber (that...