FFSM++  1.1.0
French Forest Sector Model ++
ModelCoreSpatial.cpp
Go to the documentation of this file.
1 /***************************************************************************
2  * Copyright (C) 2015 by Laboratoire d'Economie Forestière *
3  * http://ffsm-project.org *
4  * *
5  * This program is free software; you can redistribute it and/or modify *
6  * it under the terms of the GNU General Public License as published by *
7  * the Free Software Foundation; either version 3 of the License, or *
8  * (at your option) any later version, given the compliance with the *
9  * exceptions listed in the file COPYING that is distributed together *
10  * with this file. *
11  * *
12  * This program is distributed in the hope that it will be useful, *
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of *
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
15  * GNU General Public License for more details. *
16  * *
17  * You should have received a copy of the GNU General Public License *
18  * along with this program; if not, write to the *
19  * Free Software Foundation, Inc., *
20  * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
21  ***************************************************************************/
22 #include <cmath>
23 #include <algorithm>
24 #include <tr1/array>
25 
26 
27 #include "IpIpoptApplication.hpp"
28 #include "IpSolveStatistics.hpp"
29 
30 #include "ModelCoreSpatial.h"
31 #include "ModelData.h"
32 #include "ThreadManager.h"
33 #include "Opt.h"
34 #include "Scheduler.h"
35 #include "Gis.h"
36 #include "Carbon.h"
37 
38 
40  MTHREAD = MTHREAD_h;
41 }
42 
44 
45 }
46 
47 void
49  cacheSettings(); ///< cache things like first year, second year, dClasses...
50  cacheDynamicSettings(); // cache settings that may change every year
51  initializePixelVolumes(); ///< compute px volumes vol for 2005 (including exogenous loaded volumes)
52  assignSpMultiplierPropToVols(); // assign the spatial multiplier (used in the time of return), no longer based on a Normal distribution but on the volumes present in the pixel: the more volume, the more fit the pixel is for the ft
53  initMarketModule(); ///< inside it uses first year, second year
55  MTHREAD->DO->print(true);
56  MTHREAD->SCD->advanceYear(); ///< 2005->2006
57  int thisYear = MTHREAD->SCD->getYear(); // for debugging
58  resetPixelValues(); ///< swap volumes->lagged_volumes and reset the other pixel vectors
59  cachePixelExogenousData(); ///< compute pixel tp, meta and mort
60  computeInventary(); ///< in=f(vol_t-1)
61  //printDebugInitRegionalValues();
62  computeCumulativeData(); ///< compute cumTp_exp, vHa_exp, vHa
63  initializePixelArea(); ///< compute px->area for each ft and dc (including exogenous loaded areas)
66  updateMapAreas(); ///< update the forArea_{ft} layer on each pixel as old value-hArea+regArea
67  updateOtherMapData(); ///< update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
68  sumRegionalForData(); ///< only for printing stats as forest data is never used at regional level
70  //MTHREAD->DO->printDebugPixelValues(); // uncomment to enable pixel-level debugging // moved to output->print
71 
72  MTHREAD->DO->print();
73 }
74 
75 void
77  int thisYear = MTHREAD->SCD->getYear(); // for debugging
78  cacheDynamicSettings(); // cache settings that may change every year
79  resetPixelValues(); // swap volumes->lagged_volumes and reset the other pixel vectors
80  cachePixelExogenousData(); // compute pixel tp, meta and mort
81  computeInventary(); // in=f(vol_t-1)
82  runMarketModule(); // RUN THE MARKET OPTIMISATION HERE
83  computeCumulativeData(); // compute cumTp_exp, vHa_exp
87  //MTHREAD->DO->printDebugPixelValues(); // uncomment to enable pixel-level debugging // moved to output->print
89  updateOtherMapData(); // update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
90  sumRegionalForData(); // only for printing stats as forest data is never used at regional level
92  computeEconomicBalances(); // policy costs and welfare analysis
93  MTHREAD->DO->print();
94 }
95 
96 void
98  msgOut(MSG_INFO, "Starting market module (init stage)..");
99 
100  for(uint i=0;i<regIds2.size();i++){
101  int r2 = regIds2[i];
102  //RPAR('pl',i,p_tr,t-1) = sum(p_pr, a(p_pr,p_tr)*RPAR('pl',i,p_pr,t-1))+m(i,p_tr);
103  for(uint sp=0;sp<secProducts.size();sp++){
104  double value = 0;
105  for (uint pp=0;pp<priProducts.size();pp++){
106  value += gpd("pl",r2,priProducts[pp],secondYear)*
107  gpd("a",r2,priProducts[pp],secondYear,secProducts[sp]);
108  }
109  value += (gpd("m",r2,secProducts[sp],secondYear)* gpd("pol_trSub",r2,secProducts[sp],secondYear));
110  spd(value,"pl",r2,secProducts[sp],secondYear,true);
111  }
112  // RPAR('dl',i,p_pr,t-1) = sum(p_tr, a(p_pr,p_tr)*RPAR('sl',i,p_tr,t-1));
113  for (uint pp=0;pp<priProducts.size();pp++){
114  double value=0;
115  for(uint sp=0;sp<secProducts.size();sp++){
116  value += gpd("sl",r2,secProducts[sp],secondYear)*
117  gpd("a",r2,priProducts[pp],secondYear, secProducts[sp]);
118  }
119  spd(value,"dl",r2,priProducts[pp],secondYear,true);
120  double stvalue = gpd("sl",r2,priProducts[pp],secondYear)
121  + gpd("sa",r2,priProducts[pp],secondYear);
122  spd(stvalue,"st",r2,priProducts[pp],secondYear,true);
123  double supply_fromForestShare = gpd("supply_fromForestShare",r2,priProducts[pp],secondYear);
124  spd(stvalue*supply_fromForestShare,"st_fromHarv",r2,priProducts[pp],secondYear,true); // needed for the biological module in year starty+1
125  }
126  // RPAR('st',i,prd,t-1) = RPAR('sl',i,prd,t-1)+RPAR('sa',i,prd,t-1);
127  // RPAR('dt',i,prd,t-1) = RPAR('dl',i,prd,t-1)+RPAR('da',i,prd,t-1);
128  for (uint ap=0;ap<allProducts.size();ap++){
129  //double debug = gpd("dl",r2,allProducts[ap],secondYear);
130  double dtvalue = gpd("dl",r2,allProducts[ap],secondYear)
131  + gpd("da",r2,allProducts[ap],secondYear);
132 
133  spd(dtvalue,"dt",r2,allProducts[ap],secondYear,true);
134  }
135 
136  // q1(i,p_tr) = 1/(1+((RPAR('dl',i,p_tr,t-1)/RPAR('da',i,p_tr,t-1))**(1/psi(i,p_tr)))*(RPAR('pl',i,p_tr,t-1)/PT(p_tr,t-1)));
137  // p1(i,p_tr) = 1-q1(i,p_tr);
138  // RPAR('dc',i,p_tr,t-1) = (q1(i,p_tr)*RPAR('da',i,p_tr,t-1)**((psi(i,p_tr)-1)/psi(i,p_tr))+ p1(i,p_tr)*RPAR('dl',i,p_tr,t-1)**((psi(i,p_tr)-1)/psi(i,p_tr)))**(psi(i,p_tr)/(psi(i,p_tr)-1));
139  // RPAR('pc',i,p_tr,t-1) = (RPAR('da',i,p_tr,t-1)/RPAR('dc',i,p_tr,t-1))*PT(p_tr,t-1)+(RPAR('dl',i,p_tr,t-1)/RPAR('dc',i,p_tr,t-1))*RPAR('pl',i,p_tr,t-1);
140  // RPAR('pc',i,p_pr,t-1) = (RPAR('sa',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*PT(p_pr,t-1)+(RPAR('sl',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*RPAR('pl',i,p_pr,t-1);
141  // RPAR('pw',i,p_tr,t-1) = (RPAR('dl',i,p_tr,t-1)*RPAR('pl',i,p_tr,t-1)+RPAR('da',i,p_tr,t-1)*PT(p_tr,t-1))/RPAR('dt',i,p_tr,t-1) ; //changed 20120419
142  // K(i,p_tr,t-1) = k1(i,p_tr)*RPAR('sl',i,p_tr,t-1);
143  for(uint sp=0;sp<secProducts.size();sp++){
144  double psi = gpd("psi",r2,secProducts[sp],secondYear);
145  double dl = gpd("dl",r2,secProducts[sp],secondYear);
146  double da = gpd("da",r2,secProducts[sp],secondYear);
147  double pl = gpd("pl",r2,secProducts[sp],secondYear);
148  double sa = gpd("sa",r2,secProducts[sp],secondYear);
149  double sl = gpd("sl",r2,secProducts[sp],secondYear);
150  double k1 = gpd("k1",r2,secProducts[sp],secondYear);
151  double pWo = gpd("pl",WL2,secProducts[sp],secondYear); // World price (local price for region 99999)
152 
153  double stvalue = sl+sa;
154  double q1 = 1/ ( 1+pow(dl/da,1/psi)*(pl/pWo) );
155  double p1 = 1-q1;
156  double dc = pow(
157  q1*pow(da,(psi-1)/psi) + p1*pow(dl,(psi-1)/psi)
158  ,
159  psi/(psi-1)
160  );
161  double pc = (da/dc)*pWo
162  +(dl/dc)*pl;
163  double pw = (dl*pl+da*pWo)/(dl+da);
164  double k = k1*sl;
165 
166  spd(stvalue,"st",r2,secProducts[sp],secondYear,true);
167  spd(q1,"q1",r2,secProducts[sp],firstYear,true);
168  //spd(p1,"p1",r2,secProducts[sp],firstYear,true);
169  spd(dc,"dc",r2,secProducts[sp],secondYear,true);
170  spd(pc,"pc",r2,secProducts[sp],secondYear,true);
171  spd(pw,"pw",r2,secProducts[sp],secondYear,true);
172  spd(k,"k",r2,secProducts[sp],secondYear,true);
173  }
174 
175  // t1(i,p_pr) = 1/(1+((RPAR('sl',i,p_pr,t-1)/RPAR('sa',i,p_pr,t-1))**(1/eta(i,p_pr)))*(RPAR('pl',i,p_pr,t-1)/PT(p_pr,t-1)));
176  // r1(i,p_pr) = 1-t1(i,p_pr);
177  // RPAR('sc',i,p_pr,t-1) = (t1(i,p_pr)*RPAR('sa',i,p_pr,t-1)**((eta(i,p_pr)-1)/eta(i,p_pr))+ r1(i,p_pr)*RPAR('sl',i,p_pr,t-1)**((eta(i,p_pr)-1)/eta(i,p_pr)))**(eta(i,p_pr)/(eta(i,p_pr)-1))
178  // RPAR('pc',i,p_pr,t-1) = (RPAR('sa',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*PT(p_pr,t-1)+(RPAR('sl',i,p_pr,t-1)/RPAR('sc',i,p_pr,t-1))*RPAR('pl',i,p_pr,t-1);
179  // RPAR('pw',i,p_pr,t-1) = (RPAR('sl',i,p_pr,t-1)*RPAR('pl',i,p_pr,t-1)+RPAR('sa',i,p_pr,t-1)*PT(p_pr,t-1))/RPAR('st',i,p_pr,t-1) ; //changed 20120419
180  for(uint pp=0;pp<priProducts.size();pp++){
181 
182  double sl = gpd("sl",r2,priProducts[pp],secondYear);
183  double sa = gpd("sa",r2,priProducts[pp],secondYear);
184  double eta = gpd("eta",r2,priProducts[pp],secondYear);
185  double pl = gpd("pl",r2,priProducts[pp],secondYear);
186  double pWo = gpd("pl",WL2,priProducts[pp],secondYear); // World price (local price for region 99999)
187 
188 
189  double t1 = 1/ ( 1+(pow(sl/sa,1/eta))*(pl/pWo) );
190  double r1 = 1-t1;
191  double sc = pow(
192  t1*pow(sa,(eta-1)/eta) + r1*pow(sl,(eta-1)/eta)
193  ,
194  eta/(eta-1)
195  );
196  double pc = (sa/sc)*pWo+(sl/sc)*pl;
197  double pw = (sl*pl+sa*pWo)/(sl+sa);
198 
199  spd(t1,"t1",r2,priProducts[pp],firstYear,true);
200  //spd(r1,"r1",r2,priProducts[pp],firstYear,true);
201  spd(sc,"sc",r2,priProducts[pp],secondYear,true);
202  spd(pc,"pc",r2,priProducts[pp],secondYear,true);
203  spd(pw,"pw",r2,priProducts[pp],secondYear,true);
204  }
205 
206  // up to here tested with gams output on 20120628, that's fine !!
207  } // end for each region in level 2
208 
209 
210  // initializing the exports to zero quantities
211  // initializing of the transport cost for the same region to one and distance to zero
212  for(uint r1=0;r1<l2r.size();r1++){
213  for(uint r2=0;r2<l2r[r1].size();r2++){
214  for(uint p=0;p<allProducts.size();p++){
215  for(uint r2To=0;r2To<l2r[r1].size();r2To++){
216  spd(0,"rt",l2r[r1][r2],allProducts[p],secondYear,true,i2s(l2r[r1][r2To])); // regional trade, it was exp in gams
217  if(l2r[r1][r2] == l2r[r1][r2To]){
218  spd(1,"ct",l2r[r1][r2],allProducts[p],firstYear,true,i2s(l2r[r1][r2To])); // as long as this value is higher than zero, rt within the same region is not chosen by the solver, so the value doesn't really matter. If it is zero, the solver still works and results are the same, but reported rt within the region are crazy high (100000)
219  }
220  }
221  } // end each product
222 
223  for(uint r2To=0;r2To<l2r[r1].size();r2To++){
224  if(l2r[r1][r2] == l2r[r1][r2To]){
225  spd(0,"dist",l2r[r1][r2],"",firstYear,true,i2s(l2r[r1][r2To])); // setting distance zero in code, so no need to put it in the data
226  }
227  }
228  } // end of r2 regions
229  } // end of r1 region
230 }
231 
232 void
234  msgOut(MSG_INFO, "Starting market module");
235  static double cumOverHarvesting = 0.0;
236  int thisYear = MTHREAD->SCD->getYear();
237  int previousYear = MTHREAD->SCD->getYear()-1;
238 
239  // Added for carbon poject -------------------------------
240  double intRate = MTHREAD->MD->getDoubleSetting("ir");
241 
242  // ----------------------------------------------------------------
243 
244  // *** PRE-OPTIMISATION YEARLY OPERATIONS..
245  for(uint i=0;i<regIds2.size();i++){
246  int r2 = regIds2[i];
247  for(uint sp=0;sp<secProducts.size();sp++){
248  double g1 = gpd("g1",r2,secProducts[sp],previousYear);
249  double sigma = gpd("sigma",r2,secProducts[sp]);
250  double pc_1 = gpd("pc",r2,secProducts[sp],previousYear);
251  double dc_1 = gpd("dc",r2,secProducts[sp],previousYear);
252  double k_1 = gpd("k",r2,secProducts[sp],previousYear);
253  double pol_mktDirInt_d = gpd("pol_mktDirInt_d",r2,secProducts[sp]);
254  double pol_mktDirInt_d_1 = gpd("pol_mktDirInt_d",r2,secProducts[sp],previousYear);
255  double q1 = gpd("q1",r2,secProducts[sp]);
256  double pol_mktStr_d = gpd("pol_mktStr_d",r2,secProducts[sp]);
257 
258  double q1_polCorr = min(1.0, (q1-0.5)*pol_mktStr_d+0.5); // The pol_mktStr_s coefficient makes t1 (the "b" weight in the CES function) converge toward 0.5 as pol_mktStr_s goes to 0. As pol_mktStr_s can be above 1, an upper cap of unity is imposed to t1
259 
260 
261 
262  double k = (1+g1)*k_1;
263  double aa = (sigma/(sigma+1))*pc_1*pow(dc_1,-1/sigma) * pol_mktDirInt_d_1/pol_mktDirInt_d ;
264  double gg = dc_1*pow(pc_1*pol_mktDirInt_d_1,-sigma); //alpha
265 
266  spd(k, "k" ,r2,secProducts[sp]);
267  spd(aa,"aa",r2,secProducts[sp],DATA_NOW,true);
268  spd(gg,"gg",r2,secProducts[sp],DATA_NOW,true);
269  spd(q1_polCorr,"q1_polCorr",r2,secProducts[sp],DATA_NOW,true);
270 
271  }
272 
273  // BB(i,p_pr) = (sigma(p_pr)/(sigma(p_pr)+1))*RPAR('pc',i,p_pr,t-1)*(RPAR('sc',i,p_pr,t-1)**(-1/sigma(p_pr)))*(In(i,p_pr,t-1)/In(i,p_pr,t))**(gamma(p_pr)/sigma(p_pr));
274  // FF(i,p_pr) = RPAR('sc',i,p_pr,t-1)*((RPAR('pc',i,p_pr,t-1))**(-sigma(p_pr)))*(In(i,p_pr,t)/In(i,p_pr,t-1))**(gamma(p_pr)); //chi
275  for(uint pp=0;pp<priProducts.size();pp++){
276  double gamma_incr = gpd("gamma_incr",r2,priProducts[pp]); // elast supply to increasing stocks
277  double gamma_decr = gpd("gamma_decr",r2,priProducts[pp]); // elast supply to decreasing stocks
278  double sigma = gpd("sigma",r2,priProducts[pp]); // elast supply to price
279  double pc_1 = gpd("pc",r2,priProducts[pp],previousYear);
280  double sc_1 = gpd("sc",r2,priProducts[pp],previousYear);
281  double in = gpd("in",r2,priProducts[pp])+gpd("in_deathTimber",r2,priProducts[pp]);
282  double in_1 = gpd("in",r2,priProducts[pp],previousYear)+gpd("in_deathTimber",r2,priProducts[pp],previousYear);
283  double pol_mktDirInt_s = gpd("pol_mktDirInt_s",r2,priProducts[pp]);
284  double pol_mktDirInt_s_1 = gpd("pol_mktDirInt_s",r2,priProducts[pp],previousYear);
285  double t1 = gpd("t1",r2,priProducts[pp]);
286  double pol_mktStr_s = gpd("pol_mktStr_s",r2,priProducts[pp]);
287 
288 
289  // Added for carbon project ------------------------------------------
290  double carbonPrice_1 = gpd("carbonPrice",r2,"",previousYear); //using dummy region until Anto solves issue // Antonello: solved but also generalised
291  double omega = gpd("co2content_products", r2, priProducts[pp])/1000; //kg to tons
292  double Pct_1 = carbonPrice_1*intRate;
293 
294  // -------------------------------------------------------------------
295 
296 
297  double t1_polCorr = min(1.0, (t1-0.5)*pol_mktStr_s+0.5); // The pol_mktStr_s coefficient makes t1 (the "b" weight in the CES function) converge toward 0.5 as pol_mktStr_s goes to 0. As pol_mktStr_s can be above 1, an upper cap of unity is imposed to t1
298 
299 
300  double gamma = in>in_1 ? gamma_incr: gamma_decr; // If inventories resources are decreasing we use a given elasticity, if they are increasing we use an other one
301 
302  double in_ratio = in/in_1;
303 
304  double bb = (sigma/(sigma+1.0))*pc_1*pow(sc_1,-1.0/sigma)*pow(1/in_ratio,gamma/sigma)*pow(1.0,1.0/sigma) * pol_mktDirInt_s_1/pol_mktDirInt_s ;
305 
306  /* Old FF (without carbon)
307  double ff = sc_1*pow(pc_1*pol_mktDirInt_s_1,-sigma)*pow(in_ratio,gamma); //chi
308  */
309 
310  // Added for carbon project
311  double ff = sc_1*pow((pc_1 - omega * Pct_1)*pol_mktDirInt_s_1,-sigma)*pow(in_ratio,gamma);
312  // -----------------------------------------------
313 
314  spd(bb,"bb",r2,priProducts[pp],DATA_NOW,true);
315  spd(ff,"ff",r2,priProducts[pp],DATA_NOW,true);
316  spd(sigma,"sigma",r2,priProducts[pp],DATA_NOW,true);
317  spd(t1_polCorr,"t1_polCorr",r2,priProducts[pp],DATA_NOW,true);
318 
319  }
320 
321  } // end for each region in level 2 (and updating variables)
322 
323 
324 
325  // *** OPTIMISATION....
326 
327  // Create an instance of the IpoptApplication
328  //Opt *OPTa = new Opt(MTHREAD);
329  //SmartPtr<TNLP> OPTa = new Opt(MTHREAD);
330  SmartPtr<IpoptApplication> application = new IpoptApplication();
331  string linearSolver = MTHREAD->MD->getStringSetting("linearSolver",DATA_NOW);
332  application->Options()->SetStringValue("linear_solver", linearSolver); // default in ipopt is ma27
333  //application->Options()->SetStringValue("hessian_approximation", "limited-memory"); // quasi-newton approximation of the hessian
334  //application->Options()->SetIntegerValue("mumps_mem_percent", 100);
335  application->Options()->SetNumericValue("obj_scaling_factor", -1); // maximisation
336  application->Options()->SetNumericValue("max_cpu_time", 1800); // max 1/2 hour to find the optimus for one single year
337  application->Options()->SetStringValue("check_derivatives_for_naninf", "yes");
338  //application->Options()->SetStringValue("halt_on_ampl_error", "yes"); // not a valid option
339  //application->Options()->SetStringValue("print_options_documentation", "yes"); // print the ipopt options (hundreds of pages!)
340  // Relaxing tolerance options... worse effect :-(
341  //application->Options()->SetNumericValue("tol", 1e-07);
342  //application->Options()->SetNumericValue("constr_viol_tol",0.001);
343  //application->Options()->SetNumericValue("compl_inf_tol",0.001);
344  //application->Options()->SetNumericValue("acceptable_tol", 1e-05);
345  //application->Options()->SetNumericValue("acceptable_dual_inf_tol", 1e+9);
346  //application->Options()->SetNumericValue("acceptable_constr_viol_tol", 0.1);
347  //application->Options()->SetNumericValue("acceptable_compl_inf_tol", 0.1);
348 
349 
350  // Initialize the IpoptApplication and process the options
351  ApplicationReturnStatus status;
352  status = application->Initialize();
353  if (status != Solve_Succeeded) {
354  printf("\n\n*** Error during initialization!\n");
355  msgOut(MSG_INFO,"Error during initialization! Do you have the solver compiled for the specified linear solver?");
356  return;
357  }
358 
359  msgOut(MSG_INFO,"Running optimisation problem for this year (it may take a few minutes for large models)..");
360  status = application->OptimizeTNLP(MTHREAD->OPT);
361 
362 
363  // *** POST OPTIMISATION....
364 
365  // post-equilibrium variables->parameters assignments..
366  // RPAR(type,i,prd,t) = RVAR.l(type,i,prd);
367  // EX(i,j,prd,t) = EXP.l(i,j,prd);
368  // ObjT(t) = Obj.l ;
369  // ==> in Opt::finalize_solution()
370 
371  // Retrieve some statistics about the solve
372  if (status == Solve_Succeeded) {
373  Index iter_count = application->Statistics()->IterationCount();
374  Number final_obj = application->Statistics()->FinalObjective();
375  printf("\n*** The problem solved year %d in %d iterations!\n", thisYear, iter_count);
376  printf("\n*** The final value of the objective function is %e.\n", final_obj);
377  msgOut(MSG_INFO, "The problem solved successfully year "+i2s(thisYear)+" in "+i2s(iter_count)+" iterations.");
378  int icount = iter_count;
379  double obj = final_obj;
380  MTHREAD->DO->printOptLog(true, icount, obj);
381  } else {
382  //Number final_obj = application->Statistics()->FinalObjective();
383  int thisYear = MTHREAD->SCD->getYear();
384  cout << "***ERROR: MODEL DIDN'T SOLVE FOR THIS YEAR ("<<thisYear<<")"<< endl;
385  msgOut(MSG_CRITICAL_ERROR, "Model DIDN'T SOLVE for this year ("+i2s(thisYear)+")");
386  // IMPORTANT! Don't place the next two lines above the msgOut() function or it will crash in windows if the user press the stop button
387  //Index iter_count = application->Statistics()->IterationCount(); // syserror if model doesn't solve
388  //Number final_obj = application->Statistics()->FinalObjective();
389  int icount = 0;
390  double obj = 0;
391  MTHREAD->DO->printOptLog(false, icount, obj);
392  }
393 
394  for(uint r2= 0; r2<regIds2.size();r2++){ // you can use r2<=regIds2.size() to try an out-of range memory error that is not detected other than by valgrind (with a message "Invalid read of size 4 in ModelCore::runSimulationYear() in src/ModelCore.cpp:351")
395  int regId = regIds2[r2];
396  ModelRegion* REG = MTHREAD->MD->getRegion(regId);
397 
398  // *** st adjustments:***
399  // st_or[pp] -> original st from the mkt module
400  // st_fromFor[pp] -> cover the volumes from both alive trees and death ones. This should be bounded by inResByAnyCombination, that already includes death resources
401  // st_fromHarv[pp] -> from harvesting. This should be equal to hV and IGN data
402  // st
403  // st_or = st
404  // st_fromFor = st_or * a
405  // st_fromFor > in:
406  // (overharvesting)
407  // overHarv = st_fromFor - in
408  // st_fromFor = in
409  // st = st_fromFor/a
410  // st_fromHarv = distribute(st_fromFor)
411 
412  // // total supply and total demand..
413  // RPAR('st',i,prd,t) = RPAR('sl',i,prd,t)+RPAR('sa',i,prd,t);
414  // RPAR('dt',i,prd,t) = RPAR('dl',i,prd,t)+RPAR('da',i,prd,t);
415  // // weighted prices.. //changed 20120419
416  // RPAR('pw',i,p_tr,t) = (RPAR('dl',i,p_tr,t)*RPAR('pl',i,p_tr,t)+RPAR('da',i,p_tr,t)*PT(p_tr,t))/RPAR('dt',i,p_tr,t) ; //changed 20120419
417  // RPAR('pw',i,p_pr,t) = (RPAR('sl',i,p_pr,t)*RPAR('pl',i,p_pr,t)+RPAR('sa',i,p_pr,t)*PT(p_pr,t))/RPAR('st',i,p_pr,t) ; //changed 20120419
418  for (uint p=0;p<allProducts.size();p++){
419  double st = gpd("sl",regId,allProducts[p])+gpd("sa",regId,allProducts[p]);
420  double dt = gpd("dl",regId,allProducts[p])+gpd("da",regId,allProducts[p]);
421  spd(st,"st",regId,allProducts[p]);
422  spd(st,"st_or",regId,allProducts[p],DATA_NOW,true); // original total supply, not corrected by resetting it to min(st, inv).
423  spd(dt,"dt",regId,allProducts[p]);
424  }
425  for (uint p=0;p<secProducts.size();p++){
426  double dl = gpd("dl",regId,secProducts[p]);
427  double pl = gpd("pl",regId,secProducts[p]);
428  double da = gpd("da",regId,secProducts[p]); // bug corrected 20120913
429  double pworld = gpd("pl", WL2,secProducts[p]);
430  double dt = gpd("dt",regId,secProducts[p]);
431  double pw = dt?(dl*pl+da*pworld)/dt:0.0;
432  spd(pw,"pw",regId,secProducts[p]);
433  }
434  for (uint p=0;p<priProducts.size();p++){
435  double sl = gpd("sl",regId,priProducts[p]);
436  double pl = gpd("pl",regId,priProducts[p]);
437  double sa = gpd("sa",regId,priProducts[p]); // bug corrected 20120913
438  double pworld = gpd("pl", WL2,priProducts[p]);
439  double st = gpd("st",regId,priProducts[p]);
440  double st_fromFor = st * gpd("supply_fromForestShare",regId,priProducts[p]);
441  double pw = st?(sl*pl+sa*pworld)/st:0.0;
442  spd(pw,"pw",regId,priProducts[p]);
443  spd(st_fromFor,"st_fromFor",regId,priProducts[p],DATA_NOW,true);
444  }
445 
446  // Correcting st if this is over the in
447 
448  // Create a vector with all possible combinations of primary products
449  vector<vector<int>> priPrCombs = MTHREAD->MD->createCombinationsVector(priProducts.size());
450  int nPriPrCombs = priPrCombs.size();
451 
452  for (uint i=0;i<priPrCombs.size();i++){
453  double stMkMod = 0.0;
454  double sumIn = REG->inResByAnyCombination[i];
455  // double sumIn2 = 0.0;
456  for (uint p=0;p<priPrCombs[i].size();p++){
457  stMkMod += gpd("st_fromFor",regId,priProducts[priPrCombs[i][p]]);
458  //sumIn2 += gpd("in",regId,priProducts[priPrCombs[i][p]]);
459  }
460 
461  //if(sumIn<=0.00001){
462  // for (uint p=0;p<priPrCombs[i].size();p++){
463  // spd(0.0,"st",regId,priProducts[priPrCombs[i][p]]);
464  // }
465  // } else {
466  if(stMkMod>sumIn){ // if we harvested more than available
467  string pProductsInvolved = "";
468  for (uint p=0;p<priPrCombs[i].size();p++){
469  pProductsInvolved += (priProducts[priPrCombs[i][p]]+"; ");
470  }
471  double inV_over_hV_ratio = stMkMod ? sumIn/stMkMod : 0.0;
472  cumOverHarvesting += (stMkMod-sumIn);
473  msgOut(MSG_DEBUG, "Overharvesting has happened. Year: "+i2s(thisYear)+ "Region: "+i2s(regId)+"Involved products: "+pProductsInvolved+". sumIn: "+d2s(sumIn)+" stMkMod:" +d2s(stMkMod) + " cumOverHarvesting: "+d2s(cumOverHarvesting));
474  for (uint p=0;p<priPrCombs[i].size();p++){
475  double st_fromFor_orig = gpd("st_fromFor",regId,priProducts[priPrCombs[i][p]]);
476  spd(st_fromFor_orig*inV_over_hV_ratio,"st_fromFor",regId,priProducts[priPrCombs[i][p]]);
477  }
478  }
479 
480  //}
481 
482 
483  }
484 
485  // here we create st_fromHarv as st_fromFor - st_from_deathbiomass
486  vector <double> total_st(priProducts.size(),0.);
487  vector <double> in_deathTimber(priProducts.size(),0.);
488  vector <double> in_aliveForest (priProducts.size(),0.);
489  for (uint i=0;i<priProducts.size();i++){
490  total_st[i] = gpd("st_fromFor",regId,priProducts[i]);
491  in_deathTimber[i] = gpd("in_deathTimber",regId,priProducts[i]);
492  in_aliveForest[i] = gpd("in",regId,priProducts[i]);
493  }
494 
495  vector <double> st_fromHarv= allocateHarvesting(total_st, regId);
496 
497  for (uint i=0;i<priProducts.size();i++){
498  spd(st_fromHarv[i],"st_fromHarv",regId,priProducts[i],DATA_NOW,true);
499  }
500 
501  } // end of each region
502  if (cumOverHarvesting>0.0){
503  msgOut(MSG_DEBUG, "Overharvesting is present. Year: "+i2s(thisYear)+" cumOverHarvesting: "+d2s(cumOverHarvesting));
504  }
505 }
506 
507 
508 /**
509  * @brief ModelCoreSpatial::runBiologicalModule
510  *
511  * Changes in Area:
512  * dc area_l area diff
513  * 0 ---------> +regArea -areaFirstProdClass (areaMovingUp_00)
514  * 15 ---------> +areaFirstPrClass -hArea_15 -areaMovingUp_15
515  * 25 ---------> +areaMovingUp15 - hArea_25 - areaMovingUp_25
516  * 35 ---------> +areaMovingUp25 - hArea_35 - areaMovingUp_35
517  * ...
518  * 95 ---------> +areaMovingUp85 - hArea_95 - areaMovingUp_95
519  * 105 ---------> +areaMovingUp95 - hArea_105
520  *
521  * note: regArea is computed in the management module, not here. Further, regArea is already the net one of forest area changes
522  */
523 void
525 
526  msgOut(MSG_INFO, "Starting resource module..");
527  int thisYear = MTHREAD->SCD->getYear();
528  bool useDeathTimber = MD->getBoolSetting("useDeathTimber",DATA_NOW);
529 
530  for(uint i=0;i<regIds2.size();i++){
531  int r2 = regIds2[i];
532  int regId = r2;
533  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
534  //Gis* GIS = MTHREAD->GIS;
535  regPx = REG->getMyPixels();
536  double shareMortalityUsableTimber = 0.0;
537 
538  for (uint p=0;p<regPx.size();p++){
539  Pixel* px = regPx[p];
540 
541  double pxId = px->getID();
542  //if (pxId == 3550.0){
543  // cout << "got the pixel" << endl;
544  //}
545  //px->expectedReturns.clear();
546  for(uint j=0;j<fTypes.size();j++){
547  string ft = fTypes[j];
548  //double pxArea_debug = px->getDoubleValue("forArea_"+ft, true);
549  vector <double> hV_byDiam;
550  vector <double> productivity_byDc; // mc/ha/y
551  vector < vector <double> > hV_byDiamAndPrd;
552  vector <double> hArea_byDc;
553  vector <double> newVol_byDiam;
554  vector <double> vMort_byDc;
555  vector <double> areasMovingUp(dClasses.size(), 0.0);
556  double areaFirstProdClass;
557 
558 
559  // A - COMPUTING THE REGENERATION..
560  // if we are in a year where the time of passage has not yet been reached
561  // for the specific i,e,l then we use the exogenous Vregen, otherwise we
562  // calculate it
563  //if ( not scen("fxVreg") ,
564  // loop( (i,essence,lambda),
565  // if( ord(t)>=(tp_u1(i,essence,lambda)+2),
566  // Vregen(i,lambda,essence,t)=regArea(i,essence,lambda,t-tp_u1(i,essence,lambda))*volHa_u1(i,essence,lambda)/1000000 ;
567  // );
568  // );
569  //);
570  int tp_u0 = px->tp.at(j).at(0); // time of passage to reach the first production diameter class // bug 20140318, added ceil. 20140318 removed it.. model did go crazy with it
571  if(thisYear == secondYear){
572  px->initialDc0Area.push_back(px->area_l.at(j).at(0));
573  }
574  if(regType != "fixed" && (thisYear-secondYear) >= tp_u0 ) { // T.O.D.O to be checked -> 20121109 OK
575  double pastRegArea = px->getPastRegArea(j,thisYear-tp_u0);
576  double availableArea = px->area_l.at(j).at(0);
577  //double entryVolHa = gfd("entryVolHa",regId,ft,"");
578  double vHa = px->vHa.at(j).at(1);
579  // attention: at times this could take the wrong pastRegArea if tp changes too suddenly, as in some "strange" scenarios
580  if (oldVol2AreaMethod){
581  areaFirstProdClass = pastRegArea;
582  } else {
583  areaFirstProdClass = min(availableArea, pastRegArea); // this is just a start and will need to include the last year area
584  }
585  px->vReg.push_back(areaFirstProdClass*vHa/1000000.0); // TO.DO: check the 1000000. Should be ok, as area in ha vol in Mm^3
586  //if (pxId == 3550.0 && j==3){
587  // cout << "got the pixel" << endl;
588  //}
589  #ifdef QT_DEBUG
590  if (areaFirstProdClass < 0.0){
591  //msgOut(MSG_CRITICAL_ERROR,"Negative regeneration volumes in endogenous regeneration");
592  }
593  if ( (availableArea-pastRegArea) < -0.00000001 ){
594  // in very rare cases tp changes first in one direction and then in the other, so that the wrong past regeneration area
595  // is picked up.
596  //msgOut(MSG_CRITICAL_ERROR,"Upgrading from dc0 more area than the available one in endogenous regeneration");
597  }
598  #endif
599  } else {
600  double regionArea = REG->getValue("forArea_"+ft,OP_SUM);
601  double pxArea = px->getDoubleValue("forArea_"+ft, true); // 20121109 bug solved (add get zero for not data)
602  double regRegVolumes = gfd("vReg",r2,ft,"");
603  double newVReg = regionArea ? regRegVolumes*pxArea/regionArea : 0.0;
604  px->vReg.push_back(newVReg); // 20121108 BUG !!! solved // as now we have the area we could also use here entryVolHa
605  // only a share of the exogenous area goes up, the regeneration one doesn't yet reach tp0:
606  // areaFirstProdClass = (1.0 / px->tp.at(j).at(0) ) * px->area_l.at(j).at(0);
607  areaFirstProdClass = (1.0 / ((double) tp_u0) ) * px->initialDc0Area.at(j);
608  // in the exogenous period we are exogenously upgrading u0->u1 some areas but, as we do not have the regeneration
609  // are corresponding to that we have also to manually add it to u0
610  //px->area_l.at(j).at(0) += areaFirstProdClass;
611  //areaFirstProdClass = entryVolHa ? newVReg*1000000 /entryVolHa:0.0;
612  //if (pxId == 3550.0 && j==3){
613  // cout << "got the pixel" << endl;
614  //}
615 
616  #ifdef QT_DEBUG
617  if (areaFirstProdClass<0.0){
618  // msgOut(MSG_CRITICAL_ERROR,"Negative regeneration volumes in exogenous regeneration");
619  }
620  if (areaFirstProdClass > px->area_l.at(j).at(0)){
621  //msgOut(MSG_CRITICAL_ERROR,"Moving up area higher than available area in exogenous regeneration !");
622  }
623  #endif
624  // vReg and entryVolHa are NOT the same thing. vReg is the yearly regeneration volumes
625  // for the whole region. We can use them when we don't know the harvested area
626  // entryVolHa can lead to vReg calculation only when we know the regeneration area. So in the
627  // first years we use vReg and subsequently the endogenous one.
628  }
629 
630  //double harvestedArea = 0;
631 
632  if(useDeathTimber){
633  shareMortalityUsableTimber = gfd("shareMortalityUsableTimber",r2,ft,"");
634  } else {
635  shareMortalityUsableTimber = 0.0;
636  }
637 
638  for (uint u=0; u<dClasses.size(); u++){
639  string dc = dClasses[u];
640  double hr = 0;
641  double age = getAvgAgeByDc(px, j, u);
642 
643  //double pastYearVol_reg = u ? gfd("vol",r2,ft,dc,thisYear-1): 0;
644  double pastYearVol = px->vol_l.at(j).at(u);
645  vector <double> hV_byPrd;
646  vector <double> hr_byPrd;
647 
648  // harvesting rate & volumes...
649  // hr is by region.. there is no reason the RATE of harvesting in one pixel would differ from that in another pixel
650  //hr(u,i,essence,lambda,t) = sum(p_pr, prov(u,essence,lambda,p_pr)*RPAR('st',i,p_pr,t)/In(i,p_pr,t));
651  //hV(u,i,essence,lambda,t) = hr(u,i,essence,lambda,t) * V(u,i,lambda,essence,t-1);
652  //hV_byPrd(u,i,essence,lambda,p_pr,t) = prov(u,essence,lambda,p_pr)*(RPAR('st',i,p_pr,t)/In(i,p_pr,t))*V(u,i,lambda,essence,t-1);
653  for(uint pp=0;pp<priProducts.size();pp++){
654  double st = gpd("st_fromHarv",r2,priProducts[pp]);
655  double in = gpd("in",r2,priProducts[pp]);
656  double hr_pr = in ? app(priProducts[pp],ft,dc)*st/in : 0.0;
657  hr_byPrd.push_back( hr_pr);
658  hr += hr_pr;
659  }
660 
661  // adjusting for overharvesting..
 662  // 20160204: inserted to account for the fact that we let supply be marginally higher than "in" in the market module, to let the solver solve
663  double origHr = hr;
664  hr = min(1.0,hr);
665  for(uint pp=0;pp<priProducts.size();pp++){
666  double hr_pr = origHr ? hr_byPrd[pp] * min(1.0,1.0/origHr) : 0.0;
667  hV_byPrd.push_back( hr_pr*pastYearVol*px->avalCoef.at(j));
668  }
669 
670  double hV = hr*pastYearVol*px->avalCoef.at(j);
671 
672 
673  hV_byDiam.push_back(hV);
674  hV_byDiamAndPrd.push_back(hV_byPrd);
675 
676  // post harvesting remained volumes computation..
677  // loop(u$(ord(u)=1),
 678  // first diameter class, no harvesting and fixed regeneration..
679  // V(u,i,lambda,essence,t)=(1-1/(tp(u,i,lambda,essence))-mort(u,i,lambda,essence) )*V(u,i,lambda,essence,t-1)
680  // +Vregen(i,lambda,essence,t);
681  // );
682  // loop(u$(ord(u)>1),
683  // generic case..
684  // V(u,i,lambda,essence,t)=((1-1/(tp(u,i,lambda,essence))
685  // -mort(u,i,lambda,essence) - hr(u,i,essence,lambda,t))*V(u,i,lambda,essence,t-1)
686  // +(1/(tp(u-1,i,lambda,essence)))*beta(u,i,lambda,essence)*V(u-1,i,lambda,essence,t-1));
687  double vol;
688  double tp = px->tp.at(j).at(u); //gfd("tp",regId,ft,dc);
689  double mort = px->mort.at(j).at(u); //gfd("mortCoef",regId,ft,dc);
690  double vReg = px->vReg.at(j); //gfd("vReg",regId,ft,""); // Taking it from the memory database as we could be in a fixed vReg scenario and not having calculated it from above!
691  double beta = px->beta.at(j).at(u); //gfd("betaCoef",regId,ft,dc);
692  //double hv2fa = gfd("hv2fa",regId,ft,dc);
693  double vHa = px->vHa.at(j).at(u); //gfd("vHa",regId,ft,dc);
694  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
695 
696  double vMort = mort*pastYearVol;
697 
698  vMort_byDc.push_back(vMort);
699 
700  if(useDeathTimber){
701  iisskey key(thisYear,r2,ft,dc);
702 
703  MD->deathTimberInventory_incrOrAdd(key,vMort*shareMortalityUsableTimber);
704  }
705 
706  if(u==0){
707  vol = 0.0;
708  }else if(u==1){
709  vol = max(0.0,(1-1/tp-mort))*pastYearVol+vReg; //Antonello, "bug" fixed 20160203: In case of very strong mortality this quantity (that doesn't include harvesting) could be negative!
710  //double debug = vol;
711  #ifdef QT_DEBUG
712  if ((1-1/tp-mort)<0.0){
713  msgOut(MSG_DEBUG,"The sum of leaving trres and mortality would have lead to nevative volume if we didn't put a max. 1/tp: "+d2s(1/tp)+", mort: "+d2s(mort)+", total coeff: "+d2s((1-1/tp-mort))+" ");
714  }
715  #endif
716  } else {
717  // time of passage and volume of smaller diameter class
718  double inc = (u==dClasses.size()-1)?0:1./tp; // we exclude the possibility for trees in the last diameter class to move to an upper class
719  double tp_1 = px->tp.at(j).at(u-1); //gfd("tp",regId,ft,dClasses[u-1]);
720  double pastYearVol_1 = px->vol_l.at(j).at(u-1); //gfd("vol",regId,ft,dClasses[u-1],thisYear-1);
721  //vol = max(0.0,(1-inc-mort-hr)*pastYearVol+(1/tp_1)*beta*pastYearVol_1);
722  vol = max(0.0,(1-inc-mort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1); // I can't use any more hr as it is the harvesting rate over the available volumes, not the whole ones
723  #ifdef QT_DEBUG
724  if ((1-inc-mort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1 < 0){
725  double realVolumes = (1-inc-mort)*pastYearVol-hV+(1/tp_1)*beta*pastYearVol_1;
726  msgOut(MSG_DEBUG,"Negative real volumes ("+d2s(realVolumes)+"), possibly because of little bit larger bounds in the market module to avoid zeros. Volumes in the resource module set back to zero, so it should be ok.");
727  }
728  #endif
729  }
730  if(u != 0){ // this if is required to avoid a 0/0 and na error that then propage also in vSum()
731  double inc = (u==dClasses.size()-1)?0:1.0/tp; // we exclude the possibility for trees in the last diameter class to move to an upper class
732  double volumesMovingUp = inc*pastYearVol;
733  double pastArea = px->area_l.at(j).at(u);
734 
735  areasMovingUp.at(u) = inc*pastArea;
736 
737  if(oldVol2AreaMethod) {
738  hArea_byDc.push_back(finalHarvestFlag*1000000*hV/vHa); // volumes are in Mm^3, area in ha, vHa in m^3/ha
739  } else {
740  double finalHarvestedVolumes = finalHarvestFlag* hV;
741  double finalHarvestedRate = pastYearVol?finalHarvestedVolumes/pastYearVol:0.0; // Here we want the harvested rate over the whole volumes, not just the available ones, so we don't need to multiply to px->avalCoef.at(j)
742  #ifdef QT_DEBUG
743  if (finalHarvestedRate > 1.0){
744  msgOut(MSG_CRITICAL_ERROR,"Negative final harvested rate.");
745  }
746  #endif
747  hArea_byDc.push_back(finalHarvestedRate*pastArea); // volumes are in Mm^3, area in ha, vHa in m^3/ha
748  }
749  px->area.at(j).at(u) = max(0.0, px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u));
750  #ifdef QT_DEBUG
751  if ((px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u))< 0.0){
752  msgOut(MSG_DEBUG,"If not for a max, we would have had a negative area ("+d2s(px->area_l.at(j).at(u) - areasMovingUp.at(u) + areasMovingUp.at(u-1) - hArea_byDc.at(u))+" ha).");
753  }
754  #endif
755  } else {
756  areasMovingUp.at(u) = areaFirstProdClass;
757  hArea_byDc.push_back(0.);
758  px->area.at(j).at(u) = px->area_l.at(j).at(u) - areasMovingUp.at(u) - hArea_byDc.at(u);
759  //if (pxId == 3550.0 && j==3){
760  // cout << "got the pixel" << endl;
761  //}
762  }
763  newVol_byDiam.push_back(vol);
764  #ifdef QT_DEBUG
765  if(px->area.at(j).at(u)< 0.0 || areasMovingUp.at(u) < 0.0 || hArea_byDc.at(u) < 0.0 ){
766  msgOut(MSG_CRITICAL_ERROR, "Negative values in runBiologicalModel");
767  }
768  #endif
769 
770  //double debug = hv2fa*hr*pastYearVol*100;
771  //cout << "regId|ft|dc| debug | freeArea: " << r2 << "|"<<ft<<"|"<<dc<<"| "<< debug << " | " << freeArea_byU << endl;
772 
773  //sfd(hr,"hr",regId,ft,dc);
774  //sfd(hV,"hV",regId,ft,dc);
775  //sfd(vol,"vol",regId,ft,dc);
776 
777  //sfd(freeArea_byU,"harvestedArea",regId,ft,dc,DATA_NOW,true);
778  double productivity = hArea_byDc.at(u) ? (1000000.0 * hV_byDiam.at(u))/(hArea_byDc.at(u)*age) : 0.0;
779  productivity_byDc.push_back(productivity);
780  } // end foreach diameter classes
781  px->hVol.push_back(hV_byDiam);
782  px->hVol_byPrd.push_back(hV_byDiamAndPrd);
783  px->hArea.push_back(hArea_byDc);
784  px->vol.push_back(newVol_byDiam);
785  px->vMort.push_back(vMort_byDc);
786  px->hProductivity.push_back(productivity_byDc);
787 
788 
789  #ifdef QT_DEBUG
790  for (uint u=1; u<dClasses.size(); u++){
791  double volMort = vMort_byDc[u];
792  double harvVol = hV_byDiam[u];
793  double vol_new = newVol_byDiam[u];
794  double vol_lagged = px->vol_l.at(j).at(u);
795  double gain = vol_new - (vol_lagged-harvVol-volMort);
796  if (volMort > vol_lagged){
797  msgOut(MSG_CRITICAL_ERROR,"mort vol > lagged volumes ?");
798  }
799  }
800  #endif
801  } // end of each forest type
802  } // end of each pixel
803 
804  #ifdef QT_DEBUG
805  // checking that in a region the total hVol is equal to the st for each products. 20150122 Test passed with the new availCoef
806  double sumSt = 0.0;
807  double sumHv = 0.0;
808  for(uint pp=0;pp<priProducts.size();pp++){
809  sumSt += gpd("st_fromHarv",r2,priProducts[pp]);
810  }
811  for (uint p=0;p<regPx.size();p++){
812  for(uint j=0;j<fTypes.size();j++){
813  for (uint u=0; u<dClasses.size(); u++){
814  for(uint pp=0;pp<priProducts.size();pp++){
815  // by ft, dc, pp
816  sumHv += regPx[p]->hVol_byPrd[j][u][pp];
817  }
818  }
819  }
820  }
821  if(abs(sumSt-sumHv) > 0.000001){
822  msgOut(MSG_DEBUG, "St and harvested volumes diverge in region "+REG->getRegSName()+". St: "+d2s(sumSt)+" hV: "+d2s(sumHv));
823  }
824  #endif
825  } // end of each region
826 
827 }
828 
829 
830 
831 void
833  msgOut(MSG_INFO, "Starting management module..");
834  vector<string> allFTypes = MTHREAD->MD->getForTypeIds(true);
835  map<string,double> hAreaByFTypeGroup = vectorToMap(allFTypes,0.0);
836  int thisYear = MTHREAD->SCD->getYear();
837  double ir = MTHREAD->MD->getDoubleSetting("ir");
838 
839  // Post optimisation management module..
840  for(uint i=0;i<regIds2.size();i++){
841  int r2 = regIds2[i];
842  int regId = r2;
843  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
844  regPx = REG->getMyPixels();
845 
846  string flagHartman = MTHREAD->MD->getStringSetting("flagHartman",DATA_NOW,r2);
847  //double exogenousReference = MTHREAD->MD->getBoolSetting("exogenousReference"); // new setting to distinguish between exogenous and endogenous carbon reference levels
848 
849 
850 
851 
852  // Caching futureCarbonPrices..
853  vector<double> futureCarbonPrices;
854  if(flagHartman!="NONE"){
855  for (int y=0; y<1000; y++) {
856  futureCarbonPrices.push_back(gpd("carbonPrice",r2,"",thisYear + y));
857  }
858  }
859 
860  vector<double> polBal_fiSub (fTypes.size(),0.0); // Expenses for forest investment subsides by region and destination ft €
861 
862  // Dealing with area change..
863  double fArea_reg = REG->getArea();
864  double fArea_diff = 0.0;
865  double fArea_reldiff = 0.0;
866  if(forestAreaChangeMethod=="relative"){
867  fArea_reldiff = gfd("forestChangeAreaIncrementsRel",r2,"","",DATA_NOW);
868  fArea_diff = fArea_reg * fArea_reldiff;
869  } else if (forestAreaChangeMethod=="absolute"){
870  fArea_diff = gfd("forestChangeAreaIncrementsHa",r2,"","",DATA_NOW);
871  fArea_reldiff = fArea_diff / fArea_reg;
872  }
873 
874  double regHArea = 0.0; // for the warning
875 
876  // Management rate
877  // 20170123: if mr is defined at regional level in the forData table, good, otherwise we look at settings table.
878  // If it isn't there as well, we rise a critical error.
879  // 20170302: removed the (false positive) error message raised if we put mr in setting table
880  // 20180626: mr is now, like all the other settings, powered with a regional dimension, so this looking in the
881  // forest data is no longer needed, but we keep it as some old projects have mr in the forData sheet.
882  double mr;
883  int origErrorLevel = MD->getErrorLevel();
884  MTHREAD->MD->setTempBool(true); // this should not be needed as it is set positive in getForData() map soubroutine..
886  mr = MD->getForData("mr",regId,"","");
887  if(!MTHREAD->MD->getTempBool()) { // mr not found in the forest data
889  mr = MD->getDoubleSetting("mr",DATA_NOW,regId);
890  }
891  //try{
892  // mr = MD->getForData("mr",regId,"","");
893  //} catch (...){
894  // mr = MD->getDoubleSetting("mr");
895  //}
896  MD->setErrorLevel(origErrorLevel);
897 
898  // Caching carbon reference levels by ftypes
899  vector<vector<double>> futureCarbonRef;
900  vector<vector<double>> futureExtraBiomass_ratio;
901 
902  if(flagHartman!="NONE"){
903  for(uint j=0;j<fTypes.size();j++){
904  string ft = fTypes[j];
905  vector<double> futureCarbonRef_ft;
906  vector<double>futureExtraBiomass_ratio_ft;
907  for (int y=0; y<1000; y++) {
908  double carbonRef = gfd("carbonRef",r2,ft,"", thisYear + y);
909  futureCarbonRef_ft.push_back(carbonRef);
910  double extraBiomass_ratio = gfd("extraBiomass_ratio",regId,ft,"",DATA_NOW);
911  futureExtraBiomass_ratio_ft.push_back(extraBiomass_ratio);
912  }
913  futureCarbonRef.push_back(futureCarbonRef_ft);
914  futureExtraBiomass_ratio.push_back(futureExtraBiomass_ratio_ft);
915  }
916  }
917 
918 
919  for (uint p=0;p<regPx.size();p++){
920  Pixel* px = regPx[p];
921  px->expectedReturns.clear();
922  px->expectedReturnsNotCorrByRa.clear(); // BUG discovered 20160825
923  //px->expectedReturnsCarbon.clear(); TODO
924  px->optDc.clear();
925  px->optFtChosen.clear();
926  px->optDcChosen.clear();
927  resetMapValues(hAreaByFTypeGroup,0.0);
928  double totalHarvestedAreaOrig = vSum(px->hArea); // still need to remove the forest decrease areas..
929  double totalHarvestedArea = 0.0; // account for (eventual) forest decreases area)
930  vector<double> thisYearRegAreas(fTypes.size(),0.0); // initialize a vector of fTypes.size() zeros.
931  vector<double> expectedReturns(fTypes.size(),0.0); // uncorrected expected returns (without considering transaction costs). These are in form of eai
932  vector<double> expectedReturnsCarbon(fTypes.size(),0.0); // expected return of the carbon only
933  vector<int> rotation_dc(fTypes.size(),0.0); //vector to store optimal dc for each ftype (to be reused when flagHartman == "BAU")
934  vector< vector<double> > deltaAreas(fTypes.size()+1, vector<double>(fTypes.size()+1,0)); // matrix of regeneration areas ft_from/ft_to
935 
936 
937 
938  double fArea_px = vSum(px->area);
939  double fArea_diff_px = fArea_px * fArea_diff/ fArea_reg;
940 
941  double fArea_incr = max(0.0,fArea_diff_px);
942  double fArea_incr_rel = totalHarvestedAreaOrig?fArea_incr/totalHarvestedAreaOrig:0.0;
943  double fArea_decr = - min(0.0,fArea_diff_px);
944  double fArea_decr_rel = totalHarvestedAreaOrig?min(1.0,fArea_decr/totalHarvestedAreaOrig):0.0; // we can "remove" at maximum what has been harvested
945  regHArea += totalHarvestedAreaOrig;
946  totalHarvestedArea = totalHarvestedAreaOrig *(1-fArea_decr_rel);
947 
948 
949  // A - Computing the harvestingArea by parent ft group (for the allocation according to the prob of presence):
950  for(uint j=0;j<fTypes.size();j++){
951  string ft = fTypes[j];
952  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
953  double hAreaThisFt=vSum(px->hArea.at(j))*(1-fArea_decr_rel);
954  incrMapValue(hAreaByFTypeGroup,parentFt,hAreaThisFt); // increment the parent ft of the harvested area, neeed for assigning the frequences (prob. of presence)
955  }
956 
957  // B - Computing the uncorrected expected returns (without considering transaction costs)
958  // 20120910, Antonello: changed.. calculating the expected returns also for fixed and fromHrLevel regeneration (then not used but gives indication)
959  // calculating the expected returns..
960  // loop ( (u,i,essence,lambda,p_pr),
961  // if (sum(u2, hV(u2,i,essence,lambda,t))= 0,
962  // expRetPondCoef(u,i,essence,lambda,p_pr) = 0;
963  // else
964  // expRetPondCoef(u,i,essence,lambda,p_pr) = hV_byPrd(u,i,essence,lambda,p_pr,t)/ sum(u2, hV(u2,i,essence,lambda,t));
965  // );
966  // );
967  // expReturns(i,essence,lambda) = sum( (u,p_pr),
968  // RPAR("pl",i,p_pr,t)*hv2fa(i,essence,lambda,u)*(1/df_byFT(u,i,lambda,essence))* // df_byFT(u,i,lambda,essence)
969  // expRetPondCoef(u,i,essence,lambda,p_pr)
970  // );
971 
972  for(uint j=0;j<fTypes.size();j++){
973  string ft = fTypes[j];
974 
975  // Added for carbon project
976  double co2content_inventory = gfd("co2content_inventory",r2,ft,"",DATA_NOW)/1000; // kilos to tons
977  // End of added
978 
979 
980  double expReturns = 0.;
981  int optDc = 0; // "optimal diameter class", the one on which the expected returns are computed
982  for (uint u=0; u<dClasses.size(); u++){
983  string dc = dClasses[u];
984  double vHa = px->vHa_exp.at(j).at(u);
985  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
986  double cumTp_u = px->cumTp_exp.at(j).at(u);
987  for (uint pp=0;pp<priProducts.size();pp++){
988  double pl = gpd("pl",regId,priProducts[pp]); // note that this is the OBSERVED price. If we call it at current year+cumTp_u we would have the expected price. But we would first have to compute it, as pw is weigthed price world-local and we don't have local price for the future. DONE 20141202 ;-)
989  double worldCurPrice = gpd("pl",WL2,priProducts[pp]);
990  double worldFutPrice = gpd("pl",WL2,priProducts[pp],thisYear+cumTp_u);
991  double sl = gpd("sl",regId,priProducts[pp]);
992  double sa = gpd("sa",regId,priProducts[pp]);
993  double pw_exp = computeExpectedPrice(pl, worldCurPrice, worldFutPrice, sl, sa, px->expTypePrices); //20141030: added the expected price!
994  double raw_amount = finalHarvestFlag*pw_exp*vHa*app(priProducts[pp],ft,dc); // B.U.G. 20121126, it was missing app(pp,ft,dc) !!
995  double anualised_amount = MD->calculateAnnualisedEquivalent(raw_amount,cumTp_u, ir);
996 
997  // If carbon payments are included and the reference is an exogenous fixed value (e.g. year of reference), we compute revenues from carbon after retrieving reference from input files
998  // Added for carbon project-------------------------------------------------------------------------
999  if (flagHartman =="EXO") {
1000  double raw_amount_carbon = 0;
1001  for (int y=0; y<cumTp_u; y++) {
1002  double carbonPrice_y = futureCarbonPrices[y];
1003  double carbonRef = futureCarbonRef[j][y];
1004  double expectedVHa_y = getVHaByYear(px, j, y, futureExtraBiomass_ratio[j][y]);
1005  double carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - carbonRef);
1006  raw_amount_carbon += carbonPayment_y*pow(1+ir,cumTp_u - y);
1007  }
1008  //Now we add the final payment corresponding to carbon sequestered in long-lived products
1009  double pickling_pp = gpd("pickling",regId,priProducts[pp]);
1010  double CarbonPrice_cumTp = futureCarbonPrices[floor(cumTp_u)];
1011  raw_amount_carbon += CarbonPrice_cumTp * pickling_pp * finalHarvestFlag * vHa * app(priProducts[pp],ft,dc); // T.O.D.O BUG ? This shoudn't be -= ? No, it's the price * long term stored that adds to the payments in terms of ir*price*sequestrered wood
1012  // And we finally annualise everything
1013  double anualised_amount_carbon = MD->calculateAnnualisedEquivalent(raw_amount_carbon,cumTp_u, ir);
1014  anualised_amount += anualised_amount_carbon;
1015  //TODO no this is still within a for each product dc class px->expectedReturnsCarbon.push_back(anualised_amount_carbon);
1016  }
1017  // End of added-----------------------------------------------------------------------------------
1018 
1019 
1020 
1021  if (anualised_amount>expReturns) {
1022  expReturns=anualised_amount;
1023  optDc = u;// this is the index of the optimal dc, not the dc itself
1024  }
1025  }
1026  }
1027 
1028  //results are pushed back to pixel only if they are final
1029  if (flagHartman=="NONE" || flagHartman=="EXO") {
1030  px->expectedReturnsNotCorrByRa.push_back(expReturns);
1031  }
1032 
1033  //calculation of expected returns with risk aversion
1034  if(MD->getBoolSetting("heterogeneousRiskAversion",DATA_NOW)){
1035  double ra = px->getDoubleValue("ra");
1036  double cumMort = 1-px->cumAlive_exp.at(j).at(optDc);
1037  //cout << px->getID() << "\t" << ft << "\t\t" << "optDc" << optDc << "\t" << cumMort << endl;
1038  double origExpReturns = expReturns;
1039  expReturns = origExpReturns * (1.0 - ra*cumMort);
1040  }
1041  px->expectedReturns.push_back(expReturns);
1042  px->optDc.push_back(optDc);
1043 
1044  // results are pushed back to pixel only if they are final
1045  if (flagHartman=="NONE" || flagHartman=="EXO") {
1046  px->expectedReturns.push_back(expReturns);
1047  }
1048 
1049  expectedReturns.at(j) = expReturns;
1050  rotation_dc.at(j) = optDc; // here we store the index of optimal dc for the forest type considered
1051 
1052  } // end foreach forest type
1053 
1054 
1055  // Looping again by forest types, here by ft in input (those harvested)
1056  // Added for second carbon project-------------------------------------------------------------------------------------------------------------------------------------
1057  // If carbon payments are included and the reference is BAU (ie, endogenous, we need a new loop-------------------------------------------------------------------
1058  // In this case, the reference is what happens without policy, i.e. the result of the previous loop when (flagHartman =="REFERENCE") is FALSE
1059  // So we just redo another loop with carbon payments using the previous result as reference
1060 
1061  if (flagHartman =="ENDO") { // if carbon payments take place and reference is endogenous (is BAU)
1062  // STEP 1 - retrieve optimal regeneration choice without carbon payments
1063  int ft_opt_index = getMaxPos(expectedReturns); // get ft with max expected returns
1064  string ft_opt = fTypes[ft_opt_index]; // get its name
1065  int dc_opt_index = rotation_dc[ft_opt_index]; // get dc for forest type with max expected return
1066  string dc_opt = dClasses[dc_opt_index]; // get its name
1067  double cumTP_opt = px->cumTp_exp.at(ft_opt_index).at(dc_opt_index);
1068  double co2content_inventory_opt = gfd("co2content_inventory",r2,ft_opt,"",DATA_NOW)/1000; // kilos to tons
1069  // STEP 2 - calculate expected carbon contents on the optimal choice without carbon payments, to use as reference
1070  vector <double> FutureCarbonContent_opt;
1071  for (int y=0; y<cumTP_opt; y++) {
1072  double expectedVHa_opt_y = getVHaByYear(px, ft_opt_index, y, futureExtraBiomass_ratio[ft_opt_index][y]);
1073  FutureCarbonContent_opt.push_back(co2content_inventory_opt*expectedVHa_opt_y);
1074  }
1075  int cumTP_opt_floor = FutureCarbonContent_opt.size(); // need to get an integer value to compare rotation lengths
1076 
1077  // STEP 3 - new loop to find optimal regeneration choice WITH carbon payments, using the choice WITHOUT carbon payments as a reference
1078 
1079  for(uint j=0;j<fTypes.size();j++){
1080  string ft = fTypes[j];
1081  double co2content_inventory = gfd("co2content_inventory",r2,ft,"",DATA_NOW)/1000; // kilos to tons
1082 
1083  double expReturns_hartman = 0.;
1084  int optDc_hartman = 0; // "optimal diameter class", the one on which the expected returns are computed
1085  for (uint u=0; u<dClasses.size(); u++){
1086  string dc = dClasses[u];
1087  double vHa = px->vHa_exp.at(j).at(u);
1088  double finalHarvestFlag = gfd("finalHarvestFlag",regId,ft,dc);
1089  double cumTp_u = px->cumTp_exp.at(j).at(u);
1090  // Here we calculate expected revenues from timber
1091  for (uint pp=0;pp<priProducts.size();pp++){
1092  double pl = gpd("pl",regId,priProducts[pp]); // note that this is the OBSERVED price. If we call it at current year+cumTp_u we would have the expected price. But we would first have to compute it, as pw is weigthed price world-local and we don't have local price for the future. DONE 20141202 ;-)
1093  double worldCurPrice = gpd("pl",WL2,priProducts[pp]);
1094  double worldFutPrice = gpd("pl",WL2,priProducts[pp],thisYear+cumTp_u);
1095  double sl = gpd("sl",regId,priProducts[pp]);
1096  double sa = gpd("sa",regId,priProducts[pp]);
1097  double pw_exp = computeExpectedPrice(pl, worldCurPrice, worldFutPrice, sl, sa, px->expTypePrices); //20141030: added the expected price!
1098  double raw_amount = finalHarvestFlag*pw_exp*vHa*app(priProducts[pp],ft,dc); // B.U.G. 20121126, it was missing app(pp,ft,dc) !!
1099  double anualised_amount = MD->calculateAnnualisedEquivalent(raw_amount,cumTp_u, ir);
1100 
1101  // Now we calculate expected revenues from carbon
1102  double raw_amount_carbon = 0;
1103  for (int y=0; y<cumTp_u; y++) { // calculate yearly payment for each year of the rotation
1104  double carbonPrice_y = futureCarbonPrices[y];
1105  double expectedVHa_y = getVHaByYear(px, j, y, futureExtraBiomass_ratio[j][y]);
1106  double carbonPayment_y;
1107  if (y==0) { // we use euclidian division, so an exception must be made when denominator is equal to 0
1108  carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[y]);
1109  } else {
1110  int factor = y/cumTP_opt_floor; // euclidian division of the two lengths we compare
1111  if (cumTP_opt_floor%y >0) { // if the year is NOT a multiple of optimal rotation time, we need to take the year-th value of vector modulus how many rotations we have passed
1112  carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[y - factor*cumTP_opt_floor]);
1113  } else { // if the year is EXACTLY a multiple of optimal rotation time, we need to take the last value of vector, not the first one
1114  carbonPayment_y = carbonPrice_y*ir*(co2content_inventory*expectedVHa_y - FutureCarbonContent_opt[cumTP_opt_floor]);
1115  }
1116  }
1117 
1118  raw_amount_carbon += carbonPayment_y*pow(1+ir,cumTp_u - y); // all payments put at end of rotation
1119  }
1120  //Now we add the final payment corresponding to carbon sequestered in long-lived products
1121  double pickling_pp = gpd("pickling",regId,priProducts[pp]);
1122  double CarbonPrice_cumTp = futureCarbonPrices[floor(cumTp_u)];
1123  raw_amount_carbon += CarbonPrice_cumTp * pickling_pp * finalHarvestFlag * vHa * app(priProducts[pp],ft,dc);
1124  // And we annualise carbon revenues and add them to annualised timber revenues
1125  double anualised_amount_carbon = MD->calculateAnnualisedEquivalent(raw_amount_carbon,cumTp_u, ir);
1126  anualised_amount += anualised_amount_carbon;
1127  //px->expectedReturnsCarbon = anualised_amount_carbon; TODO
1128 
1129  // If the new expected revenues is higher than previously, we store the maximum value and corresponding dc.
1130  if (anualised_amount>expReturns_hartman) {
1131  expReturns_hartman=anualised_amount;
1132  optDc_hartman = u;
1133  }
1134  } // end foreach primary product
1135  } // end foreach DC
1136 
1137  // Here we send results to pixel because they are final
1138  px->expectedReturnsNotCorrByRa.push_back(expReturns_hartman);
1139  if(MD->getBoolSetting("heterogeneousRiskAversion",DATA_NOW)){
1140  double ra = px->getDoubleValue("ra");
1141  double cumMort = 1-px->cumAlive_exp.at(j).at(optDc_hartman);
1142  //cout << px->getID() << "\t" << ft << "\t\t" << "optDc" << optDc << "\t" << cumMort << endl;
1143  double origExpReturns_hartman = expReturns_hartman;
1144  expReturns_hartman = origExpReturns_hartman * (1.0 - ra*cumMort);
1145  }
1146  px->expectedReturns.push_back(expReturns_hartman);
1147 
1148  // Now we replace results from the previous loop with the new ones
1149  expectedReturns.at(j) = expReturns_hartman;
1150  rotation_dc.at(j) = optDc_hartman;
1151 
1152 
1153  } // end foreach forest type
1154 
1155  } // end IF Hartman reference is BAU (no policy)----------------------------------------------------------------
1156 
1157  // End of added second carbon project---------------------------------------------------------------------------
1158 
1159  for(uint j=0;j<fTypes.size();j++){
1160  string ft = fTypes[j];
1161  forType* thisFt = MTHREAD->MD->getForType(ft);
1162 
1163  double harvestedAreaForThisFT = vSum(px->hArea.at(j))*(1-fArea_decr_rel); // gfd("harvestedArea",regId,ft,DIAM_ALL);
1164  deltaAreas[j][fTypes.size()] += vSum(px->hArea.at(j))*(fArea_decr_rel);
1165  vector<double> corrExpectedReturns(fTypes.size(),0.0); // corrected expected returns (considering transaction costs). These are in form of NPV
1166  vector<double> polBal_fiSub_unitvalue(fTypes.size(),0.0); // subside (tax) per ha per destination ft €/ha
1167 
1168  // C - Computing the corrected expected returns including transaction costs
1169 
1170  for(uint j2=0;j2<fTypes.size();j2++){
1171  string ft2 = fTypes[j2];
1172  double invTransCost = (gfd("invTransCost",regId,ft,ft2,DATA_NOW) * gfd("pol_fiSub",regId,ft,ft2,DATA_NOW) ) ;
1173  polBal_fiSub_unitvalue[j2] = gfd("invTransCost",regId,ft,ft2,DATA_NOW) * (gfd("pol_fiSub",regId,ft,ft2,DATA_NOW) - 1) ;
1174  corrExpectedReturns[j2] = (expectedReturns[j2]/ir)-invTransCost; // changed 20150718: npv = eai/ir + tr. cost // HUGE BUG 20151202: transaction costs should be REDUCED, not added to the npv...
1175  }
1176 
1177  //int highestReturnFtIndex = getMaxPos(corrExpectedReturns);
1178 
1179  // D - Assigning the Managed area
1180  // calculating freeArea at the end of the year and choosing the new regeneration area..
1181  //freeArea(i,essence,lambda) = sum(u, hv2fa(i,essence,lambda,u)*hr(u,i,essence,lambda,t)*V(u,i,lambda,essence,t-1)*100);
1182  //if(scen("endVreg") ,
1183  // regArea(i,essence,lambda,t) = freeArea(i,essence, lambda); // here we could introduce in/out area from other land usages
1184  //else
1185  // loop (i,
1186  // loop( (essence,lambda),
1187  // if ( expReturns(i,essence,lambda) = smax( (essence2,lambda2),expReturns(i,essence2,lambda2) ),
1188  // regArea (i,essence,lambda,t) = sum( (essence2, lambda2), freeArea(i,essence2, lambda2) ) * mr;
1189  // );
1190  // );
1191  // regArea(i,essence,lambda,t) = freeArea(i,essence, lambda)*(1-mr); // here we could introduce in/out area from other land usages
1192  // );
1193  //if (j==highestReturnFtIndex){
1194  // thisYearRegAreas[j] += totalHarvestedArea*mr;
1195  //}
1196  // If I Implement this I'll have a minimal diff in total area.. why ?????
1197 
1198  int optFtChosen = getMaxPos(corrExpectedReturns);
1199  px->optFtChosen.push_back(optFtChosen);
1200  px->optDcChosen.push_back(px->optDc.at(optFtChosen));
1201 
1202  thisYearRegAreas[getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr;
1203  thisYearRegAreas[getMaxPos(expectedReturns)] += fArea_incr*mr/((double)fTypes.size()); // mr quota of new forest area assigned to highest expected returns ft (not considering transaction costs). Done for each forest types
1204 
1205  deltaAreas[j][getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr;
1206  deltaAreas[fTypes.size()][getMaxPos(expectedReturns)] += fArea_incr*mr/((double)fTypes.size());
1207 
1208  polBal_fiSub[getMaxPos(corrExpectedReturns)] += harvestedAreaForThisFT*mr*polBal_fiSub_unitvalue[getMaxPos(corrExpectedReturns)];
1209 
1210  // E - Assigning unmanaged area
1211  //for(uint j2=0;j2<fTypes.size();j2++){
1212  if(natRegAllocation=="pp"){ // according to prob presence
1213  //string ft2 = fTypes[j2];
1214  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1215  double freq = rescaleFrequencies ? gfd("freq_norm",regId,parentFt,""):gfd("freq",regId,parentFt,""); // "probability of presence" for unmanaged forest, added 20140318
1216  double hAreaThisFtGroup = findMap(hAreaByFTypeGroup,parentFt);
1217  double hRatio = 1.0;
1218  if(hAreaThisFtGroup>0){
1219  //double harvestedAreaForThisFT2 = vSum(px->hArea.at(j2));
1220  hRatio = harvestedAreaForThisFT/hAreaThisFtGroup;
1221  } else {
1222  int nFtChilds = MTHREAD->MD->getNForTypesChilds(parentFt);
1223  hRatio = 1.0/nFtChilds;
1224  }
1225  thisYearRegAreas[j] += totalHarvestedArea*(1-mr)*freq*hRatio;
1226  thisYearRegAreas[j] += fArea_incr*(1-mr)*freq*hRatio; // non-managed quota of new forest area assigning proportionally on pp at sp group level
1227  //thisYearRegAreas[j2] += harvestedAreaForThisFT*(1-mr)*freq*hRatio;
1228  // Here is the problem with building a transaction matrix ft_from/ft_to: we don't explicitly assign from/to for unmanaged areas !
1229  for(uint j2=0;j2<fTypes.size();j2++){
1230  deltaAreas[j2][j] += vSum(px->hArea.at(j2))*(1-fArea_decr_rel)*(1-mr)*freq*hRatio;
1231  }
1232  deltaAreas[fTypes.size()][j] += fArea_incr*(1-mr)*freq*hRatio;
1233 
1234 
1235 
1236  } else { // prob presence not used..
1237 
1238  // Accounting for mortality arising from pathogens. Assigning the area to siblings according to area..
 1239  // TODO: check that mortRatePath doesn't involve adding forest area to deltaAreas[j][""]
1240 
1241  double mortRatePath = px->getPathMortality(ft, "0");
1242  if(mortRatePath > 0){
1243 
1244  string parentFt = MTHREAD->MD->getForTypeParentId(ft);
1245  vector <string> siblings = MTHREAD->MD->getForTypeChilds(parentFt);
1246  vector <double> siblingAreas;
1247  for(uint j2=0;j2<siblings.size();j2++){
1248  if(siblings[j2]==ft){
1249  siblingAreas.push_back(0.0);
1250  } else {
1251  //string debug_sibling_ft = siblings[j2];
1252  //int debug_positin = getPos(debug_sibling_ft,fTypes);
1253  double thisSiblingArea = vSum(px->area.at(getPos(siblings[j2],fTypes)));
1254  siblingAreas.push_back(thisSiblingArea);
1255  }
1256  }
1257  double areaAllSiblings = vSum(siblingAreas);
1258  thisYearRegAreas[j] += harvestedAreaForThisFT*(1-mr)*(1-mortRatePath);
1259  deltaAreas[j][j] += harvestedAreaForThisFT*(1-mr)*(1-mortRatePath);
1260 
1261  if(areaAllSiblings>0.0){ // area of siblings is >0: we attribute the area from the pathogen induced mortality to the siblings proportionally to area..
1262  for(uint j2=0;j2<siblings.size();j2++){
1263 // int debug1 = getPos(siblings[j2],fTypes);
1264 // double debug2= harvestedAreaForThisFT;
1265 // double debug3 = 1.0-mr;
1266 // double debug4 = mortRatePath;
1267 // double debug5 = siblingAreas[j2];
1268 // double debug6 = areaAllSiblings;
1269 // double debug7 = harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1270  thisYearRegAreas[getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1271  deltaAreas[j][getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)*(siblingAreas[j2]/areaAllSiblings);
1272  }
1273  } else if (siblings.size()>1) { // area of all siblings is 0, we just give them the mortality area in equal parts..
1274  for(uint j2=0;j2<siblings.size();j2++){
1275  if (siblings[j2] != ft){
1276  thisYearRegAreas[getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)* 1.0 / (( (float) siblings.size())-1.0);
1277  deltaAreas[j][getPos(siblings[j2],fTypes)] += harvestedAreaForThisFT*(1.0-mr)*(mortRatePath)* 1.0 / (( (float) siblings.size())-1.0);
1278  }
1279  }
1280  }
1281  } else { // mortRatePath == 0
1282  thisYearRegAreas[j] += harvestedAreaForThisFT*(1.0-mr);
1283  deltaAreas[j][j] += harvestedAreaForThisFT*(1.0-mr);
1284  }
1285 
1286  // Allocating non-managed quota of new forest area to ft proportionally to the current area share by ft
1287  double newAreaThisFt = vSum(px->area) ? fArea_incr*(1-mr)*vSum(px->area.at(j))/vSum(px->area): 0.0;
1288  thisYearRegAreas[j] += newAreaThisFt;
1289  deltaAreas[fTypes.size()][j] += newAreaThisFt;
1290 
1291  if(! (thisYearRegAreas[j] >= 0.0) ){
1292  msgOut(MSG_ERROR,"thisYearRegAreas[j] is not non negative (j: "+i2s(j)+", thisYearRegAreas[j]: "+i2s( thisYearRegAreas[j])+").");
1293  }
1294  //thisYearRegAreas[j2] += harvestedAreaForThisFT*(1-mr);
1295  }
1296  //}
1297  } // end for each forest type
1298 
1299  // adding regeneration area to the fist (00) diameter class
1300  for(uint j=0;j<fTypes.size();j++){
1301  px->area.at(j).at(0) += thisYearRegAreas.at(j);
1302  }
1303 
1304  #ifdef QT_DEBUG
1305  double totalRegArea = vSum(thisYearRegAreas);
1306  if (! (totalRegArea==0.0 && totalHarvestedArea==0.0)){
1307  double ratio = totalRegArea / totalHarvestedArea ;
1308  if(rescaleFrequencies && (ratio < 0.99999999999 || ratio > 1.00000000001) && forestAreaChangeMethod == "fixed") {
1309  msgOut(MSG_CRITICAL_ERROR, "Sum of regeneration areas not equal to sum of harvested area in runManagementModel()!");
1310  }
1311  }
1312  #endif
1313 
1314  px->regArea.insert(pair <int, vector<double> > (MTHREAD->SCD->getYear(), thisYearRegAreas));
1315  px->deltaArea = deltaAreas;
1316 
1317  } // end of each pixel
1318  if (-fArea_diff > regHArea){
1319  msgOut(MSG_WARNING,"In region "+ i2s(regId) + " the exogenous area decrement ("+ d2s(-fArea_diff) +" ha) is higger than the harvesting ("+ d2s(regHArea) +" ha). Ratio forced to 1.");
1320  }
1321 
1322  for(uint j=0;j<fTypes.size();j++){
1323  double debug = polBal_fiSub[j]/1000000;
1324  sfd((polBal_fiSub[j]/1000000.0),"polBal_fiSub",regId,fTypes[j],"",DATA_NOW,true); //M€
1325 
1326  }
1327  } // end of each region
1328 }
1329 
1325 void
// Caches model-level settings that stay fixed for the whole simulation run:
// key years, world code, level-2 region ids, product lists, diameter classes
// and forest-type ids. Called once from mainLoop initialisation (see HEAD).
// NOTE(review): this extraction is missing original lines 1331 (the
// ModelCoreSpatial::cacheSettings() signature/opening brace) and 1341
// (presumably the insert of priProducts into allProducts, since only the
// secProducts insert is visible) -- confirm against the repository source.
1332  msgOut(MSG_INFO, "Cashing initial model settings..");
1333  MD = MTHREAD->MD;
1334  firstYear = MD->getIntSetting("initialYear");
1335  secondYear = firstYear+1;
1336  thirdYear = firstYear+2;
1337  WL2 = MD->getIntSetting("worldCodeLev2");
1338  regIds2 = MD->getRegionIds(2);
1339  priProducts = MD->getStringVectorSetting("priProducts");
1340  secProducts = MD->getStringVectorSetting("secProducts");
1342  allProducts.insert( allProducts.end(), secProducts.begin(), secProducts.end() );
1343  dClasses = MD->getStringVectorSetting("dClasses");
// NOTE(review): the next statement is a no-op expression ("pDClasses;");
// only the insert() on the following line actually fills the vector.
1344  pDClasses; // production diameter classes: exclude the fist diameter class below 15 cm
1345  pDClasses.insert(pDClasses.end(), dClasses.begin()+1, dClasses.end() );
1346  fTypes= MD->getForTypeIds();
1347  l2r = MD->getRegionIds();
1348 }
1349 
1350 void
// Caches settings that have a temporal dimension and may therefore change
// every simulated year (read with DATA_NOW): regeneration mode, allocation
// rule, frequency rescaling, area-change method and "ir" (presumably the
// interest/discount rate -- confirm against model documentation).
// NOTE(review): original line 1351 (the function signature) is missing from
// this extraction.
1352  // Settings needs explicitly DATA_NOW to have a temporal dymension..
1353  msgOut(MSG_INFO, "Cashing model settings that may change every year..");
1354  regType = MTHREAD->MD->getStringSetting("regType",DATA_NOW); // how the regeneration should be computed (exogenous, from hr, from allocation choises)
1355  natRegAllocation = MTHREAD->MD->getStringSetting("natRegAllocation",DATA_NOW); // how to allocate natural regeneration
1356  rescaleFrequencies = MD->getBoolSetting("rescaleFrequencies",DATA_NOW);
1357  oldVol2AreaMethod = MD->getBoolSetting("oldVol2AreaMethod",DATA_NOW);
1358  //mr = MD->getDoubleSetting("mr");
1359  forestAreaChangeMethod = MTHREAD->MD->getStringSetting("forestAreaChangeMethod",DATA_NOW);
1360  ir = MD->getDoubleSetting("ir",DATA_NOW);
1361 }
1362 
1363 void
// Distributes each region's initial (firstYear) volumes, by forest type and
// diameter class, onto the region's pixels proportionally to the pixel share
// of the regional forest area: pxVol = regVol * pxArea / regForArea.
// Valid only at model start, when within-region homogeneity can be assumed.
// NOTE(review): original lines 1364 (signature) and 1394-1395 are missing
// from this extraction.
1365  msgOut(MSG_INFO, "Starting initializing pixel-level values");
1366 
1367  // pxVol = regVol * pxArea/regForArea
1368  // this function can be done only at the beginning of the model, as it assume that the distribution of volumes by diameter class in the pixels within a certain region is homogeneous, but as the model progress along the time dimension this is no longer true.
1369  if(!MD->getBoolSetting("usePixelData")) return;
1370  for(uint i=0;i<regIds2.size();i++){
1371  ModelRegion* reg = MD->getRegion(regIds2[i]);
1372  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
1373  for (uint j=0;j<rpx.size();j++){
1374  //int debugPx = rpx[j]->getID();
1375  //int debug2 = debugPx;
1376  rpx[j]->vol.clear(); // not actually necessary
1377  for(uint y=0;y<fTypes.size();y++){
1378  vector <double> vol_byu;
1379  double regForArea = reg->getValue("forArea_"+fTypes[y]);
1380  for (uint z=0;z<dClasses.size();z++){
1381  double regVol;
// Corrected comment below: the original inline comment had the branches of
// the ternary inverted with respect to the code.
1382  regVol = z ? gfd("vol",regIds2[i],fTypes[y],dClasses[z],firstYear) : 0 ; // if z==0 -> regVol=0 (first dc), otherwise regVol=gfd(..)
1383  double pxArea = rpx[j]->getDoubleValue("forArea_"+fTypes[y], true); // bug solved 20121109. get zero for not data
1384  if (pxArea<0.0){
1385  msgOut(MSG_CRITICAL_ERROR,"Error in initializePixelVolumes, negative pxArea!");
1386  }
1387  double pxVol = regForArea ? regVol * pxArea/regForArea: 0; // if we introduce new forest types without initial area we must avoid a 0/0 division
1388  //rpx[j]->changeValue(pxVol,"vol",fTypes[y],dClasses[z],firstYear);
1389  vol_byu.push_back(pxVol);
1390  }
1391  rpx[j]->vol.push_back(vol_byu);
1392  }
1393  }
1394  }
1396 }
1397 
1398 /**
1399  * @brief ModelCoreSpatial::assignSpMultiplierPropToVols assigns the spatial multiplier (used in the time of return) based no more on a Normal distribution but on the volumes present in the pixel: more volume, more the pixel is fit for the ft
1400  *
1401  * This function apply to the pixel a multiplier of time of passage that is inversely proportional to the volumes of that forest type present in the pixel.
1402  * The idea is that the spots where we observe more of a given forest type are probably the most suited ones to it.
1403  *
1404  * The overall multipliers **of time of passage** (that is, the one returned by Pixel::getMultiplier("tp_multiplier") ) will then be the product of this multiplier that account for spatial heterogeneity and of an eventual exogenous
1405  * multiplier that accounts for different scenarios among the spatio-temporal dimensions.
1406  *
1407  * Given that (forest type index omitted):
1408  * - \f$V_{p}\f$ = volume of a given ft in each pixel (p)
1409  * - \f$\bar{g}\f$ and \f$\sigma_{g}\f$ = regional average and standard deviation of the growth rate
1410  * - \f$m_{p}\f$ = multiplier of time of passage
1411  *
1412  * This multiplier is computed as:
1413  * - \f$ v_{p} = max(V) - V_{p}~~ \f$ A diff from the max volume is computed in each pixel
1414  * - \f$ vr_{p} = v_{p} * \bar{g}/\bar{v}~~ \f$ The volume diff is rescaled to match the regional growth rate
1415  * - \f$ vrd_{p} = vr_{p} - \bar{vr}~~ \f$ Deviation of the rescaled volumes are computed
1416  * - \f$ vrdr_{p} = vrd_{p} * \sigma_{g}/\sigma_{vr}~~ \f$ The deviations are then rescaled to match the standard deviations of the regional growth rate
1417  * - \f$ m_{p} = (vrdr_{p} + \bar{vr}) / \bar{g}~~ \f$ The multiplier is computed from the ratio of the average rescaled volumes plus rescaled deviation over the average growth rate.
1418  *
1419  * And it has the following properties:
1420  * - \f$\bar{m} = 1\f$
1421  * - \f$\sigma_{m} = cv_{g}\f$
1422  * - \f$m_{p} = V_{p}*\alpha+\beta\f$
1423  * - \f$m_{\bar{V}} = 1\f$
1424  *
1425  * For spreadsheet "proof" see the file computation_of_growth_multipliers_from_know_avg_sd_and_proportional_to_share_of_area_in_each_pixel.ods
1426  */
1427 void
// Implements the algorithm documented in the doxygen block above:
// per region and forest type, derive a pixel-level time-of-passage
// multiplier from the pixel's volume deficit with respect to the
// best-stocked pixel, rescaled to the regional growth-rate mean (agr)
// and standard deviation (sStDev).
// NOTE(review): original line 1428 (the function signature) is missing
// from this extraction.
1429 
1430  if(!MTHREAD->MD->getBoolSetting("useSpatialVarPropToVol")){return;}
1431  for(uint r=0;r<regIds2.size();r++){
1432  int rId = regIds2[r];
1433  ModelRegion* reg = MD->getRegion(regIds2[r]);
1434  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[r]);
1435  for(uint f=0;f<fTypes.size();f++){
1436  string ft = fTypes[f];
1437  double agr = gfd("agr",regIds2[r],ft,"");
1438  double sStDev = gfd("sStDev",regIds2[r],ft,"");
1439  vector<double> vols;
1440  vector<double> diffVols;
1441  vector<double> diffVols_rescaled;
1442  double diffVols_rescaled_deviation;
1443  double diffVols_rescaled_deviation_rescaled;
1444  double final_value;
1445  double multiplier;
1446  vector<double> multipliers; // for tests
1447 
1448  double vol_max, rescale_ratio_avg, rescale_ratio_sd;
1449  double diffVols_avg, diffVols_rescaled_avg;
1450  double diffVols_rescaled_sd;
1451 
// Step 0: collect the total volume of this ft in each pixel of the region.
1452  for (uint p=0;p<rpx.size();p++){
1453  Pixel* px = rpx[p];
1454  vols.push_back(vSum(px->vol[f]));
1455  } // end for each pixel
1456  vol_max=getMax(vols);
1457 
// Step 1: v_p = max(V) - V_p (deficit from the best-stocked pixel).
1458  for(uint p=0;p<vols.size();p++){
1459  diffVols.push_back(vol_max-vols[p]);
1460  }
1461 
// Step 2: vr_p = v_p * agr/avg(v) -- rescale deficits to the regional growth mean.
1462  diffVols_avg = getAvg(diffVols);
1463  rescale_ratio_avg = (diffVols_avg != 0.0) ? agr/diffVols_avg : 1.0;
1464  for(uint p=0;p<diffVols.size();p++){
1465  diffVols_rescaled.push_back(diffVols[p]*rescale_ratio_avg);
1466  }
1467  diffVols_rescaled_avg = getAvg(diffVols_rescaled);
1468  diffVols_rescaled_sd = getSd(diffVols_rescaled,false);
1469 
// Steps 3-5: rescale deviations to sStDev, rebuild the value and divide by agr.
1470  rescale_ratio_sd = (diffVols_rescaled_sd != 0.0) ? sStDev/diffVols_rescaled_sd : 1.0;
1471  for(uint p=0;p<diffVols_rescaled.size();p++){
1472  diffVols_rescaled_deviation = diffVols_rescaled[p] - diffVols_rescaled_avg;
1473  diffVols_rescaled_deviation_rescaled = diffVols_rescaled_deviation * rescale_ratio_sd;
1474  final_value = diffVols_rescaled_avg + diffVols_rescaled_deviation_rescaled;
1475  multiplier = (agr != 0.0) ? min(1.6, max(0.4,final_value/agr)) : 1.0; //20151130: added bounds for extreme cases. Same bonds as in Gis::applySpatialStochasticValues()
1476  // multiplier = 1.0;
1477 
1478  Pixel* px = rpx[p];
1479  px->setSpModifier(multiplier,f);
1480  multipliers.push_back(multiplier);
1481  }
1482 
1483  #ifdef QT_DEBUG
1484  // Check relaxed as we introduced bounds that may change slighly the avg and sd...
1485  double avgMultipliers = getAvg(multipliers);
1486  double sdMultipliers = getSd(multipliers,false);
1487  if ( avgMultipliers < 0.9 || avgMultipliers > 1.1){
1488  msgOut(MSG_CRITICAL_ERROR, "The average of multipliers of ft "+ ft +" for the region " + i2s(rId) + " is not 1!");
1489  }
1490  if ( ( sdMultipliers - (sStDev/agr) ) < -0.5 || ( sdMultipliers - (sStDev/agr) ) > 0.5 ){
1491  double cv = sStDev/agr;
1492  msgOut(MSG_CRITICAL_ERROR, "The sd of multipliers of ft "+ ft +" for the region " + i2s(rId) + " is not equal to the spatial cv for the region!");
1493  }
1494  #endif
1495  } // end for each ft
1496  } // end for each region
1497 }
1498 
1499 
1500 
1501 void
// Initialises the carbon-balance module stocks per level-2 region:
// death-biomass stocks from "vMort" and product stocks from primary-product
// exports ("sa") plus secondary-product domestic consumption ("dl").
// NOTE(review): original lines 1502 (signature) and 1505 are missing from
// this extraction; the ///< brief below suggests this also calls
// initialiseEmissionCounters() in the full source -- confirm.
1503 
1504  ///< call initialiseDeathBiomassStocks(), initialiseProductsStocks() and initialiseEmissionCounters()
1506 
1507  for(uint i=0;i<regIds2.size();i++){
1508  vector<double> deathBiomass;
1509  for(uint j=0;j<fTypes.size();j++){
1510  double deathBiomass_ft = gfd("vMort",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
1511  deathBiomass.push_back(deathBiomass_ft);
1512  }
1513  MTHREAD->CBAL->initialiseDeathBiomassStocks(deathBiomass, regIds2[i]);
1514  vector<double>qProducts;
// NOTE(review): the two loops below use a signed "int p" against the
// unsigned size() (signed/unsigned comparison), unlike the "uint" used
// elsewhere in this file.
1515  for(int p=0;p<priProducts.size();p++){
1516  // for the primary products we consider only the exports as the domestic consumption is entirelly transformed in secondary products
1517  double int_exports = gpd("sa",regIds2[i],priProducts[p],DATA_NOW);
1518  qProducts.push_back(int_exports);
1519  }
1520  for(int p=0;p<secProducts.size();p++){
1521  // for the tranformed product we skip those that are imported, hence derived from other forest systems
1522  double consumption = gpd("dl",regIds2[i],secProducts[p],DATA_NOW); // dl = sl + net regional imports
1523  qProducts.push_back(consumption);
1524  }
1525  MTHREAD->CBAL->initialiseProductsStocks(qProducts, regIds2[i]);
1526 
1527  }
1528 }
1529 
1530 void
// Builds (year, region, forest type, diameter class) keys for the last 30
// years, presumably to initialise a death-timber/emission counter map keyed
// by iisskey.
// NOTE(review): original lines 1531 (signature) and 1538 (the statement that
// actually uses "key", i.e. the map insertion/initialisation) are missing
// from this extraction -- as visible, "key" is constructed and discarded.
// Confirm against the repository source.
1532  int currentYear = MTHREAD->SCD->getYear();
1533  for(int y=currentYear;y>currentYear-30;y--){
1534  for(uint i=0;i<regIds2.size();i++){
1535  for(uint j=0;j<fTypes.size();j++){
1536  for (uint u=0;u<dClasses.size();u++){
1537  iisskey key(y,regIds2[i],fTypes[j],dClasses[u]);
1539  }
1540  }
1541  }
1542  }
1543 }
1544 
1545 /**
1546  * @brief ModelCoreSpatial::initializePixelArea
1547  *
1548  * This function computes the initial area by ft and dc. It requires vHa computed in computeCumulativeData, which is why it is
1549  * separated from the other initialisedPixelValues().
1550  * As the sum of area computed using vHa may differ from the one memorised in forArea_* layer, all values are scaled to match
1551  * it before being memorised.
1552  * Also assign area = area_l
1553  */
1554 
1555 void
// Computes the initial pixel area by forest type and diameter class (see the
// doxygen block above): first-dc area from regional regeneration volumes and
// entry volume density, other dc areas as vol/vHa, then all values are
// normalised so their sum matches the forArea_* layer. Also sets area=area_l.
// NOTE(review): original line 1556 (the function signature) is missing from
// this extraction.
1557  msgOut(MSG_INFO, "Starting initializing pixel-level area");
1558  if(!MD->getBoolSetting("usePixelData")) return;
1559  for(uint i=0;i<regIds2.size();i++){
1560  ModelRegion* reg = MD->getRegion(regIds2[i]);
1561  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
1562  for (uint p=0;p<rpx.size();p++){
1563  Pixel* px = rpx[p];
1564  double pxid= px->getID();
1565  for(uint j=0;j<fTypes.size();j++){
1566  string ft = fTypes[j];
1567  vector <double> tempAreas;
1568  vector <double> areasByFt;
1569  double pxArea = px->getDoubleValue("forArea_"+ft,true)/10000.0; //ha
1570  for (uint u=0;u<dClasses.size();u++){
1571  if(u==0){
1572  double regionArea = reg->getValue("forArea_"+ft,OP_SUM)/10000.0; //ha
1573  double regRegVolumes = gfd("vReg",regIds2[i],ft,""); // regional regeneration volumes.. ugly name !!
1574  double newVReg = regionArea ? regRegVolumes*pxArea/regionArea : 0.0;
1575  double tp_u0 = px->tp.at(j).at(0); // time of passage to reach the first production diameter class
1576  double entryVolHa = gfd("entryVolHa",regIds2[i],ft,"");
// NOTE(review): unlike the dcVHa case below, entryVolHa is not guarded
// against zero here -- a zero entryVolHa would yield inf/nan. Confirm the
// data guarantees entryVolHa > 0.
1577  double tempArea = (newVReg*1000000.0/entryVolHa)*tp_u0;
1578  tempAreas.push_back(tempArea);
1579  } else {
1580  string dc = dClasses[u];
1581  double dcVol = px->vol_l.at(j).at(u)*1000000.0; // m^3
1582  double dcVHa = px->vHa.at(j).at(u); // m^3/ha
1583  #ifdef QT_DEBUG
1584  if(dcVol < 0.0 || dcVHa < 0.0){
1585  msgOut(MSG_CRITICAL_ERROR, "Negative volumes or density in initializePixelArea");
1586  }
1587  #endif
1588  double tempArea = dcVHa?dcVol/dcVHa:0;
1589  tempAreas.push_back(tempArea);
1590  }
1591 
1592  } // end dc
1593  double sumTempArea = vSum(tempAreas);
1594  // double sharedc0 = 5.0/90.0; // an arbitrary share of total area allocated to first diameter class
1595  //tempAreas.at(0) = sumTempArea * sharedc0;
1596  //sumTempArea = vSum(tempAreas);
1597  double normCoef = sumTempArea?pxArea/ sumTempArea:0;
1598  //cout << i << '\t' << pxid << '\t' << ft << '\t' << normCoef << endl;
1599  #ifdef QT_DEBUG
1600  if(normCoef < 0.0){
1601  msgOut(MSG_CRITICAL_ERROR, "Negative normCoef in initializePixelArea");
1602  }
1603  #endif
1604  for (uint u=0;u<dClasses.size();u++){
1605  areasByFt.push_back(tempAreas.at(u)*normCoef); // the original construction of the vector is missing (translated from Italian)
1606  }
1607  #ifdef QT_DEBUG
1608  if (pxArea != 0.0){
1609  double ratio = vSum(areasByFt)/ pxArea; // vSum(areasByFt) should be equal to pxArea
1610  if(ratio < 0.99999999999 || ratio > 1.00000000001) {
1611  msgOut(MSG_CRITICAL_ERROR, "pxArea is not equal to vSum(areasByFt) in initializePixelArea");
1612  }
1613  }
1614  #endif
1615  px->area_l.push_back(areasByFt);
1616  /// \todo here I have finally also area_ft_dc_px and I can implement the new one I am in 2006
1617  } // end ft
1618  px->area = px->area_l; //Assigning initial value of area to the area of the old year
1619  } // end px
1620  } // end region
1621  loadExogenousForestLayers("area");
1622  /// \todo: also update area_l
1623 }
1624 
1625 void
// For each pixel and forest type, (re)computes per-diameter-class cumulative
// series: cumTp (years to REACH each dc), vHa (volume density built up
// cumulatively from entryVolHa, beta growth and mortality survival) and
// cumAlive (cumulated survival rate), plus their "expected" counterparts
// (cumTp_exp, vHa_exp, cumAlive_exp) that depend on the owner's expectation
// type px->expType (-1 = frozen first-year values, 0..1 = blend of current
// and forecast values).
// NOTE(review): original lines 1626 (signature) and 1815 are missing from
// this extraction.
1627 
1628  msgOut(MSG_INFO, "Starting computing some cumulative values..");
1629  int thisYear = MTHREAD->SCD->getYear();
1630 
1631 // double sumCumTP=0;
1632 // double sumVHa = 0;
1633 // double count = 0;
1634 // double avg_sumCumTp;
1635 // double avg_sumVHa;
1636 
1637  for(uint r2= 0; r2<regIds2.size();r2++){
1638  int regId = regIds2[r2];
1639  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1640 
1641  for (uint p=0;p<regPx.size();p++){
1642  Pixel* px = regPx[p];
// Clear all cumulative series before rebuilding them for this pixel.
1643  px->cumTp.clear();
1644  px->cumTp_exp.clear();
1645  px->vHa_exp.clear();
1646  px->vHa.clear();
1647  px->cumAlive.clear();
1648  px->cumAlive_exp.clear();
1649  double expType = px->expType;
1650 
1651  for(uint j=0;j<fTypes.size();j++){
1652  string ft = fTypes[j];
1653 
1654  double tp_multiplier_now = px->getMultiplier("tp_multiplier",ft,DATA_NOW);
1655  double tp_multiplier_t0 = px->getMultiplier("tp_multiplier",ft,firstYear);
1656  double mortCoef_multiplier_now = px->getMultiplier("mortCoef_multiplier",ft,DATA_NOW);
1657  double mortCoef_multiplier_t0 = px->getMultiplier("mortCoef_multiplier",ft,firstYear);
1658  double betaCoef_multiplier_now = px->getMultiplier("betaCoef_multiplier",ft,DATA_NOW);
1659  double betaCoef_multiplier_t0 = px->getMultiplier("betaCoef_multiplier",ft,firstYear);
1660  double pathMort_now, pathMort_t0;
1661 
1662  // calculating the cumulative time of passage and the (cumulativelly generated) vHa for each diameter class (depending on forest owners diam growth expectations)
1663  //loop(u$(ord(u)=1),
1664  // cumTp(u,i,lambda,essence) = tp_u1(i,essence,lambda);
1665  //);
1666  //loop(u$(ord(u)>1),
1667  // cumTp(u,i,lambda,essence) = cumTp(u-1,i,lambda,essence)+tp(u-1,i,lambda,essence);
1668  //);
1669  ////ceil(x) DNLP returns the smallest integer number greater than or equal to x
1670  //loop( (u,i,lambda,essence),
1671  // cumTp(u,i,lambda,essence) = ceil(cumTp(u,i,lambda,essence));
1672  //);
1673  vector <double> cumTp_temp; // cumulative time of passage to REACH a diameter class (tp is to LEAVE to the next one)
1674  vector <double> vHa_temp; // volume at hectar by each diameter class [m^3/ha]
1675  vector <double> cumAlive_temp; // cumulated alive rate to reach a given diameter class
1676  vector <double> cumTp_exp_temp; // expected version of cumTp_temp
1677  vector <double> vHa_exp_temp; // expected version of vHa_temp
1678  vector <double> cumAlive_exp_temp; // "expected" version of cumMort
1679 
// NOTE(review): the error level is silenced here and not visibly restored
// within this function -- confirm it is reset elsewhere (e.g. by the caller).
1680  MD->setErrorLevel(MSG_NO_MSG); // as otherwise on 2007 otherwise sfd() will complain that is filling multiple years (2006 and 2007)
1681  for (uint u=0; u<dClasses.size(); u++){
1682  string dc = dClasses[u];
1683  double cumTp_u, cumTp_u_exp, cumTp_u_noExp, cumTp_u_fullExp;
1684  double tp, tp_exp, tp_noExp, tp_fullExp;
1685  double vHa_u, vHa_u_exp, vHa_u_noExp, vHa_u_fullExp, beta, beta_exp, beta_noExp, beta_fullExp, mort, mort_exp, mort_noExp, mort_fullExp;
1686  double cumAlive_u, cumAlive_exp_u;
1687  pathMort_now = px->getPathMortality(ft,dc,DATA_NOW);
1688  pathMort_t0 = px->getPathMortality(ft,dc,firstYear);
1689  // only cumTp is depending for the expectations, as it is what it is used by owner to calculate return of investments.
1690  // the tp, beta and mort coefficients instead are the "real" ones as predicted by scientist for that specific time
1691 
1692  if(u==0) {
1693  // first diameter class.. expected and real values are the same (0)
1694  cumTp_u = 0.;
1695  vHa_u = 0.;
1696  cumAlive_u = 1.;
1697  cumTp_temp.push_back(cumTp_u);
1698  vHa_temp.push_back(vHa_u);
1699  cumTp_exp_temp.push_back(cumTp_u);
1700  vHa_exp_temp.push_back(vHa_u);
1701  cumAlive_temp.push_back(cumAlive_u);
1702  cumAlive_exp_temp.push_back(cumAlive_u);
1703  } else {
1704  // other diameter classes.. first dealing with real values and then with expected ones..
1705  // real values..
1706  // real values..
1707  tp = gfd("tp",regId,ft,dClasses[u-1],thisYear)*tp_multiplier_now;
1708  cumTp_u = cumTp_temp[u-1] + tp;
1709  if (u==1){
1710  /**
1711  Note on the effect of mortality modifiers on the entryVolHa.
1712  Unfortunatly for how it is defined the mortality multiplier (the ratio with the new mortality rate over the old one) we can't
1713  compute a entryVolHa based on it. It is NOT infact just like: vHa_adjusted = vHa_orig / mort_multiplier.
1714  The effect of mortality on the vHa of the first diameter class is unknow, and so we can't compute the effect of a relative
1715  increase.
1716  */
1717  vHa_u = gfd("entryVolHa",regId,ft,"",thisYear);
1718  mort = 0.; // not info about mortality first diameter class ("00")
1719  } else {
1720  mort = 1-pow(1-gfd("mortCoef",regId,ft,dClasses[u-1],thisYear)*mortCoef_multiplier_now+pathMort_now,tp); // mortality of the previous diameter class
1721  beta = gfd("betaCoef",regId,ft,dc, thisYear)*betaCoef_multiplier_now;
1722  vHa_u = vHa_temp[u-1]*beta*(1-mort);
1723  }
1724  cumAlive_u = max(0.,cumAlive_temp[u-1]*(1-mort));
1725  cumAlive_temp.push_back(cumAlive_u);
1726  cumTp_temp.push_back(cumTp_u);
1727  vHa_temp.push_back(vHa_u);
1728  // expected values..
1729  /**
1730  param expType Specify how the forest owners (those that make the investments) behave will be the time of passage in the future in order to calculate the cumulative time of passage in turn used to discount future revenues.
1731  Will forest owners behave adaptively believing the time of passage between diameter classes will be like the observed one at time they make decision (0) or they will have full expectations believing forecasts (1) or something in the middle ?
1732  For compatibility with the GAMS code, a -1 value means using initial simulation tp values (fixed cumTp)."
1733  */
1734  if (expType == -1){
// expType == -1: expectations frozen at first-year coefficients.
1735  tp_exp = gfd("tp",regId,ft,dClasses[u-1],firstYear)*tp_multiplier_t0;
1736  //tp = px->tp.at(u); no. not possible, tp stored at pixel level is the current year one
1737  cumTp_u_exp = cumTp_exp_temp[u-1]+tp_exp;
1738  cumTp_exp_temp.push_back(cumTp_u_exp);
1739  if(u==1) {
1740  vHa_u_exp = gfd("entryVolHa",regId,ft,"",firstYear);
1741  mort_exp = 0.; // not info about mortality first diameter class ("00")
1742  } else {
1743  mort_exp = 1-pow(1-gfd("mortCoef",regId,ft,dClasses[u-1], firstYear)*mortCoef_multiplier_t0+pathMort_t0,tp_exp); // mortality rate of previous diameter class
1744  beta_exp = gfd("betaCoef",regId,ft,dc, firstYear)*betaCoef_multiplier_t0;
1745  vHa_u_exp = vHa_exp_temp[u-1]*beta_exp*(1-mort_exp);
1746  }
1747  } else {
// expType in [0,1]: blend "no expectation" (current coefficients) with
// "full expectation" (coefficients looked up at the forecast year
// thisYear+ceil(cumTp) when the dc will actually be reached).
1748  double tp_multiplier_dynamic = px->getMultiplier("tp_multiplier",ft,thisYear+ceil(cumTp_exp_temp[u-1]));
1749  tp_noExp = gfd("tp",regId,ft,dClasses[u-1])*tp_multiplier_now;
1750  cumTp_u_noExp = cumTp_exp_temp[u-1]+tp_noExp;
1751  tp_fullExp = gfd("tp",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]))*tp_multiplier_dynamic ; // time of passage that there should be to reach this diameter class in the year where the previous diameter class will be reached
1752  cumTp_u_fullExp = cumTp_exp_temp[u-1]+tp_fullExp ; // it adds to the time of passage to reach the previous diameter class the time of passage that there should be to reach this diameter class in the year where the previous diameter class will be reached
1753  cumTp_u_exp = cumTp_u_fullExp*expType+cumTp_u_noExp*(1-expType); // 20121108: it's math the same as cumTp_exp_temp[u-1] + tp
1754  cumTp_exp_temp.push_back(cumTp_u_exp);
1755  if(u==1) {
1756  vHa_u_noExp = gfd("entryVolHa",regId,ft,"",DATA_NOW);
1757  vHa_u_fullExp = gfd("entryVolHa",regId,ft,"",thisYear+ceil(cumTp_u));
1758  vHa_u_exp = vHa_u_fullExp*expType+vHa_u_noExp*(1-expType);
1759  mort_exp = 0.; // not info about mortality first diameter class ("00")
1760  } else {
1761  mort_noExp = 1-pow(1-min(1.0,gfd("mortCoef",regId,ft,dClasses[u-1], DATA_NOW)*mortCoef_multiplier_now+pathMort_now), tp_noExp); // mortCoef is a yearly value. Mort coeff between class is 1-(1-mortCoeff)^tp
1762  double mortCoef_multiplier_dynamic = px->getMultiplier("mortCoef_multiplier",ft,thisYear+ceil(cumTp_exp_temp[u-1]));
1763  double pathMort_dynamic = px->getPathMortality(ft,dc,thisYear+ceil(cumTp_exp_temp[u-1]));
1764  mort_fullExp = 1-pow(1-min(1.0,gfd("mortCoef",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]))*mortCoef_multiplier_dynamic+pathMort_dynamic),tp_fullExp); // mortality of the previous diameter class
1765  //double debug1 = gfd("mortCoef",regId,ft,dClasses[u-1],thisYear+ceil(cumTp_exp_temp[u-1]));
1766  //double debug2 = debug1*mortCoef_multiplier_dynamic+pathMort_dynamic;
1767  //double debug3 = min(1.0,debug2);
1768  //double debug4 = 1.0-debug3;
1769  //double debug5 = pow(debug4,tp_fullExp);
1770  //double debug6 = 1.0-debug5;
1771 
1772 
1773  beta_noExp = gfd("betaCoef",regId,ft,dc, DATA_NOW)*betaCoef_multiplier_now;
1774  double betaCoef_multiplier_dynamic = px->getMultiplier("betaCoef_multiplier",ft,thisYear+ceil(cumTp_u));
1775  beta_fullExp = gfd("betaCoef",regId,ft,dc, thisYear+ceil(cumTp_u))*betaCoef_multiplier_dynamic;
1776  mort_exp = mort_fullExp*expType+mort_noExp*(1-expType);
1777  beta_exp = beta_fullExp*expType+beta_noExp*(1-expType);
1778  vHa_u_exp = vHa_exp_temp[u-1]*beta_exp*(1-mort_exp); // BUG !!! mort is yearly value, not between diameter class. SOLVED 20121108
1779  }
1780  }
1781  vHa_exp_temp.push_back(vHa_u_exp);
1782  cumAlive_exp_u = max(0.,cumAlive_exp_temp[u-1]*(1-mort_exp));
1783  cumAlive_exp_temp.push_back(cumAlive_exp_u);
1784 
1785  //cout << "********" << endl;
1786  //cout << "dc;mort;cumAlive;cumAlive_exp "<< endl ;
1787  //cout << dClasses[u] << ";"<< mort << ";" << cumAlive_u << ";" << cumAlive_exp_u << endl;
1788 
1789  }
1790  // debug stuff on vHa
1791  //double vHa_new = gfd("vHa",regId,ft,dc,DATA_NOW);
1792  //double hv2fa_old = gfd("hv2fa",regId,ft,dc,DATA_NOW);
1793  //cout << "Reg|Ft|dc|vHa (new)|1/hv2fa (old): " << regId << " | " << ft;
1794  //cout << " | " << dc << " | " << vHa_new << " | " << 1/hv2fa_old << endl;
1795 
1796  } // end of each diam
1797  //double pixID = px->getID();
1798  //cout << thisYear << ";"<< regIds2[r2] << ";" << pixID << ";" << ft << ";" << cumTp_exp_temp[3] << ";" << vHa_exp_temp[3] << endl;
// Store the freshly built series for this forest type on the pixel.
1799  px->cumTp.push_back(cumTp_temp);
1800  px->vHa.push_back(vHa_temp);
1801  px->cumAlive.push_back(cumAlive_temp);
1802  px->cumTp_exp.push_back(cumTp_exp_temp);
1803  px->vHa_exp.push_back(vHa_exp_temp);
1804  px->cumAlive_exp.push_back(cumAlive_exp_temp);
1805 
1806  //sumCumTP += cumTp_exp_temp[3];
1807  //sumVHa += vHa_exp_temp[3];
1808  //count ++;
1809 
1810 
1811  } // end of each ft
1812  //double debug = 0.0;
1813  } // end of each pixel
1814  } // end of each region
1816  //avg_sumCumTp = sumCumTP/ count;
1817  //avg_sumVHa = sumVHa / count;
1818  //cout << "Avg sumCumTp_35 and sumVha_35: " << avg_sumCumTp << " and " << avg_sumVHa << " (" << count << ")" << endl;
1819  //exit(0);
1820 }
1821 
1822 void
// Rolls the pixel state over to a new simulation year: swaps current values
// into the *_l ("last year") copies, then clears the per-year vectors.
// Exception: px->area keeps last year's values as its default instead of
// being cleared (see comment on the line itself).
// NOTE(review): original line 1823 (the function signature) is missing from
// this extraction.
1824  msgOut(MSG_INFO, "Starting resetting pixel level values");
1825  for(uint r2= 0; r2<regIds2.size();r2++){
1826  int regId = regIds2[r2];
1827  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1828  for (uint p=0;p<regPx.size();p++){
1829  Pixel* px = regPx[p];
1830  px->swap(VAR_VOL); // vol_l = vol
1831  px->swap(VAR_AREA); // area_l = area
1832  // 20121108 BUG! Solved, used empty (just return true if the vector is empty) instead of clear (it actually clears the vector)
1833  px->vol.clear(); // by ft,dc
1834  px->area = px->area_l; // ATTENCTION, DIFFERENT FROM THE OTHERS. Here it is not cleared, it is assigned the previous year as default
1835  /*px->area.clear(); // by ft,dc*/
1836  px->hArea.clear(); // by ft, dc
1837  px->deltaArea.clear();
1838  //px->regArea.clear(); // by year, ft NO, this one is a map, it doesn't need to be changed
1839  px->hVol.clear(); // by ft, dc
1840  px->hVol_byPrd.clear(); // by ft, dc, pp
1841  //px->in.clear(); // by pp
1842  //px->hr.clear(); // by pp
1843  px->vReg.clear(); // by ft
1844  px->expectedReturns.clear(); // by ft
1845  px->beta.clear();
1846  px->mort.clear();
1847  px->tp.clear();
1848  px->cumTp.clear();
1849  px->vHa.clear();
1850  px->cumTp_exp.clear();
1851  px->vHa_exp.clear();
1852  px->cumAlive.clear();
1853  px->cumAlive_exp.clear();
1854  px->vMort.clear();
1855  //std::fill(rpx[j]->vMort.begin(), rpx[j]->vMort.end(), 0.0);
1856 
1857  }
1858  }
1859 }
1860 
1861 void
// Caches the current-year exogenous coefficients at pixel level: time of
// passage (tp), growth (beta) and mortality (mort) by forest type and
// diameter class, plus the availability coefficient (avalCoef) when enabled.
// NOTE(review): original line 1862 (the function signature) is missing from
// this extraction.
1863 
1864  msgOut(MSG_INFO, "Starting cashing on pixel spatial-level exogenous data");
1865  bool applyAvalCoef = MTHREAD->MD->getBoolSetting("applyAvalCoef");
1866 
1867  for(uint r2= 0; r2<regIds2.size();r2++){
1868  int regId = regIds2[r2];
1869  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
1870  for (uint p=0;p<regPx.size();p++){
1871  Pixel* px = regPx[p];
1872  px->tp.clear();
1873  px->beta.clear();
1874  px->mort.clear();
1875  px->avalCoef.clear();
1876 
1877  for(uint j=0;j<fTypes.size();j++){
1878  string ft = fTypes[j];
1879  vector <double> tp_byu;
1880  vector <double> beta_byu;
1881  vector <double> mort_byu;
1882 
1883  double tp_multiplier_now = px->getMultiplier("tp_multiplier",ft,DATA_NOW);
1884  double mortCoef_multiplier_now = px->getMultiplier("mortCoef_multiplier",ft,DATA_NOW);
1885  double betaCoef_multiplier_now = px->getMultiplier("betaCoef_multiplier",ft,DATA_NOW);
1886  double avalCoef_ft = applyAvalCoef?px->getSTData("avalCoef",ft,DATA_NOW):1.0;
1887 
// NOTE(review): exact floating-point equality against the "noValue"
// sentinel -- works only if getSTData returns the sentinel bit-for-bit;
// confirm this is the intended missing-data protocol.
1888  if(avalCoef_ft == MTHREAD->MD->getDoubleSetting("noValue")){
1889  msgOut(MSG_CRITICAL_ERROR,"applyAvalCoef has benn asked for, but I can't find the avalCoef in the data!");
1890  }
1891 
1892  for (uint u=0; u<dClasses.size(); u++){
1893  string dc = dClasses[u];
1894  double pathMortality = px->getPathMortality(ft,dc,DATA_NOW);
1895  double tp, beta_real, mort_real;
1896  if (u==0){
1897  // tp of first diameter class not making it change across the time dimension, otherwise problems in getting the rigth past
1898  // regenerations. BUT good, px->tp.at(0) is used only to pick up the right regeneration, so the remaining of the model
1899  // uses the getMultiplier version and cumTp consider the dynamic effects also in the first dc.
1900  tp = gfd("tp",regId,ft,dClasses[u],firstYear)*px->getMultiplier("tp_multiplier",ft,firstYear); // tp is defined also in the first diameter class, as it is the years to reach the NEXT diameter class
1901  } else {
1902  tp = gfd("tp",regId,ft,dClasses[u],DATA_NOW)*tp_multiplier_now; // tp is defined also in the first diameter class, as it is the years to reach the NEXT diameter class
1903  }
1904  beta_real = u?gfd("betaCoef",regId,ft,dClasses[u],DATA_NOW)*betaCoef_multiplier_now:0;
1905  mort_real = min(u?gfd("mortCoef",regId,ft,dClasses[u],DATA_NOW)*mortCoef_multiplier_now+pathMortality :0,1.0); //Antonello, bug fixed 20160203: In any case, natural plus pathogen mortality can not be larger than 1!
1906  tp_byu.push_back(tp);
1907  beta_byu.push_back(beta_real);
1908  mort_byu.push_back(mort_real);
1909  } // end of each dc
1910  px->tp.push_back(tp_byu);
1911  px->beta.push_back(beta_byu);
1912  px->mort.push_back(mort_byu);
1913  px->avalCoef.push_back(avalCoef_ft);
1914  } // end of each ft
1915  } // end of each pixel
1916  } // end of each region
1917 }
1918 
1919 void
// NOTE(review): the defining signature line (listing line 1920) is missing from this
// extraction. From the visible body, for every level-2 region this routine computes:
// - "in": the alive-timber inventory available this year per primary product, summed
//   over pixels as applicability app(pp,ft,dc) * pixel lagged volume * availability coef;
// - "in_deathTimber": the death-timber inventory available per primary product;
// - REG->inResByAnyCombination[i]: one upper bound per SUBSET of primary products
//   (union of alive + death timber of the concerned products), used by the optimiser.
1921  msgOut(MSG_INFO, "Starting computing inventary available for this year..");
1922  int nbounds = pow(2,priProducts.size()); // 2^nPriProducts subsets; NOTE(review): pow() returns double, implicitly truncated to int here
1923  vector<vector<int>> concernedPriProductsTotal = MTHREAD->MD->createCombinationsVector(priProducts.size());
1924  int currentYear = MTHREAD->SCD->getYear();
1925 
1926  for(uint i=0;i<regIds2.size();i++){
1927  int r2 = regIds2[i];
1928  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
1929  //Gis* GIS = MTHREAD->GIS;
1930  regPx = REG->getMyPixels();
1931  vector <double> in_reg(priProducts.size(),0.); // should have created a vector of size priProducts.size(), all filled with zeros
1932  vector <double> in_deathTimber_reg(priProducts.size(),0.); // should have created a vector of size priProducts.size(), all filled with zeros
1933  for (uint p=0;p<regPx.size();p++){
1934  Pixel* px = regPx[p];
1935  //int debugPx = px->getID();
1936  //int debug2 = debugPx;
1937  //px->in.clear();
1938  for(uint pp=0;pp<priProducts.size();pp++){
1939  double in = 0;
1940  for(uint ft=0;ft<fTypes.size();ft++){
1941  for(uint dc=0;dc<dClasses.size();dc++){
1942  in += app(priProducts[pp],fTypes[ft],dClasses[dc])*px->vol_l.at(ft).at(dc)*px->avalCoef.at(ft);
1943  }
1944  }
1945  //px->in.push_back(in);
1946  in_reg.at(pp) += in;
1947  } // end of each priProduct
1948  } // end each pixel
1949 
1950 
1951  for(uint pp=0;pp<priProducts.size();pp++){
1952  vector<string> priProducts_vector; // single-product vector: queries the death timber available for just this product
1953  priProducts_vector.push_back(priProducts[pp]);
1954 
1955  double in_deathMortality = MD->getAvailableDeathTimber(priProducts_vector,r2,currentYear-1);
1956  in_deathTimber_reg.at(pp) += in_deathMortality;
1957 
1958  // Even if I fixed all the lower bounds to zero in Opt::get_bounds_info still the model
1959  // doesn't solve with no-forest in a region.
1960  // Even with 0.0001 doesn't solve !!
1961  // With 0.001 some scenarios doesn't solve in 2093
1962  // With 0.003 vRegFixed doesn't solve in 2096
1963  // Tried with 0.2 but no changes, so put it back on 0.003
1964  //spd(max(0.001,in_reg.at(pp)),"in",r2,priProducts[pp],DATA_NOW,true);
1965  spd(in_reg.at(pp),"in",r2,priProducts[pp],DATA_NOW,true);
1966  spd(in_deathTimber_reg.at(pp),"in_deathTimber",r2,priProducts[pp],DATA_NOW,true);
1967  #ifdef QT_DEBUG
1968  if (in_reg.at(pp) < -0.0){ // -0.0 compares equal to 0.0: fires only for strictly negative inventories
1969  msgOut(MSG_CRITICAL_ERROR,"Negative inventory");
1970  }
1971  #endif
1972  }
1973 
1974  // ##### Now creating a set of bounds for the optimisation that account of the fact that the same ft,dc can be used for multiple products:
1975 
1976  // 20160928: Solved a big bug: for each combination instead of taking the UNION of the various priProduct inventory sets I was taking the sum
1977  // Now both the alive and the death timber are made from the union
1978  // 20150116: As the same (ft,dc) can be used in more than one product knowing -and bounding the supply in the optimisation- each single
1979  // in(pp) is NOT enough.
1980  // We need to bound the supply for each possible combination, that is for 2^(number of prim.pr)
1981  // Here we compute the detailed inventory. TO.DO: Create the bounds in Opt. done
1982  // 20160209: Rewritten and corrected a bug that was not giving enough inv to multiproduct combinations
1983  for (uint i=0; i<nbounds; i++){
1984  vector<int> concernedPriProducts = concernedPriProductsTotal[i];
1985  vector<string> concernedPriProducts_ids = positionsToContent(priProducts,concernedPriProducts);
1986  //double debug = 0.0;
1987  //for(uint z=0;z<concernedPriProducts.size();z++){
1988  // debug += gpd("in",r2,priProducts[concernedPriProducts[z]]); // to.do: this will need to be rewritten checked!
1989  //}
1990  double bound_alive = MD->getAvailableAliveTimber(concernedPriProducts_ids,r2); // From px->vol_l, as in "in"
1991  double bound_deathTimber = MD->getAvailableDeathTimber(concernedPriProducts_ids,r2,currentYear-1); // From deathTimberInventory map
1992  double bound_total = bound_alive + bound_deathTimber;
1993 
1994  REG->inResByAnyCombination[i] = bound_total;
1995  //REG->inResByAnyCombination_deathTimber[i] = bound_deathTimber;
1996  } // end for each bound
1997  } // end each region
1998 }
1999 
2000 void
// NOTE(review): the defining signature line (listing line 2001) is missing from this
// extraction; the error strings below identify the function as updateMapAreas.
// It refreshes the per-pixel "forArea_<ft>" GIS layers (stored in m^2; the model works
// in ha, hence the *10000 conversions) using one of two methods:
// - new method (!oldVol2AreaMethod): layer value = vSum(px->area) for each forest type;
// - old method: layer value = previous value - harvested area + regenerated area.
2002  msgOut(MSG_INFO, "Updating map areas..");
2003 
2004  if (!oldVol2AreaMethod){
2005  if(!MD->getBoolSetting("usePixelData")) return;
2006  for(uint i=0;i<regIds2.size();i++){
2007  ModelRegion* reg = MD->getRegion(regIds2[i]);
2008  vector <Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regIds2[i]);
2009  for (uint p=0;p<rpx.size();p++){
2010  Pixel* px = rpx[p];
2011  double pxid= px->getID(); // NOTE(review): unused local, presumably kept for debugging
2012  for(uint j=0;j<fTypes.size();j++){
2013  string ft = fTypes[j];
2014  double forArea = vSum(px->area.at(j));
2015  #ifdef QT_DEBUG
2016  if(forArea < 0.0 ){
2017  msgOut(MSG_CRITICAL_ERROR, "Negative forArea in updateMapAreas");
2018  }
2019  #endif
2020  px->changeValue("forArea_"+ft, forArea*10000); // ha -> m^2
2021  } // end ft
2022  } // end px
2023  } // end region
2024  } else {
2025  int currentYear = MTHREAD->SCD->getYear();
2026  map<int,double> forestArea; // forest area by each region
2027  pair<int,double > forestAreaPair;
2028  vector<int> l2Regions = MTHREAD->MD->getRegionIds(2, true);
2029  vector <string> fTypes = MTHREAD->MD->getForTypeIds(); // NOTE(review): shadows the class-level fTypes used by the other branch
2030  int nFTypes = fTypes.size();
2031  int nL2Regions = l2Regions.size();
2032  for(int i=0;i<nL2Regions;i++){
2033  int regId = l2Regions[i];
2034  vector<Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regId);
2035  for(int j=0;j<nFTypes;j++){
2036  string ft = fTypes[j];
2037  //double regForArea = reg->getValue("forArea_"+ft);
2038  //double harvestedArea = gfd("harvestedArea",regId,ft,DIAM_ALL);
2039  //double regArea = gfd("regArea",regId,ft,DIAM_ALL);
2040  //cout << "Regid/ft/area/harvested/regeneration: " <<regId<<";"<<ft<<";"<<regForArea<<";"<<harvestedArea<<";" <<regArea<<endl;
2041  //double newAreaNet = regArea-harvestedArea;
2042  //double newAreaRatio = newAreaNet / regForArea;
2043  for(uint z=0;z<rpx.size();z++){
2044  Pixel* px = rpx[z];
2045  double oldValue = px->getDoubleValue("forArea_"+ft,true)/10000;
2046  double hArea = vSum(px->hArea.at(j)); //bug 20140205 areas in the model are in ha, in the layer in m^2
2047  double regArea = findMap(px->regArea,currentYear).at(j); //bug 20140205 areas in the model are in ha, in the layer in m^2
2048  //double newValue = oldValue*(1. + newAreaRatio);
2049  double newValue = oldValue-hArea+regArea; // area balance: previous - harvested + regenerated
2050  double areaNetOfRegeneration = oldValue-hArea;
2051  #ifdef QT_DEBUG
2052  if (areaNetOfRegeneration<0.0){
2053  msgOut(MSG_CRITICAL_ERROR,"areaNetOfRegeneration negative in updateMapAreas");
2054  }
2055  if (newValue<0.0){
2056  msgOut(MSG_CRITICAL_ERROR,"for area negative in updateMapAreas");
2057  }
2058  #endif
2059  rpx[z]->changeValue("forArea_"+ft, newValue*10000); // ha -> m^2
2060  }
2061  }
2062  }
2063  }
2064 }
2065 
2066 void
// NOTE(review): the defining signature line (listing line 2067) is missing from this
// extraction. The body copies per-pixel model results into the optional GIS layers
// "vol_<ft>" (total volume over diameter classes) and "expectedReturns_<ft>" for every
// level-2 region and forest type, then refreshes the corresponding GUI images.
// Layers are written only if they exist (layerExist guards), so outputs are opt-in.
2068 
2069 vector<int> l2Regions = MTHREAD->MD->getRegionIds(2, true);
2070 vector <string> fTypes = MTHREAD->MD->getForTypeIds();
2071 int nFTypes = fTypes.size();
2072 int nL2Regions = l2Regions.size();
2073 for(int i=0;i<nL2Regions;i++){
2074  int regId = l2Regions[i];
2075  vector<Pixel*> rpx = MTHREAD->GIS->getAllPlotsByRegion(regId);
2076  for(int j=0;j<nFTypes;j++){
2077  string ft = fTypes[j];
2078  for(uint z=0;z<rpx.size();z++){
2079  Pixel* px = rpx[z];
2080  double vol = vSum(px->vol.at(j)); // total pixel volume for this ft, summed over diameter classes
2081  double expectedReturns = px->expectedReturns.at(j);
2082  if(MTHREAD->GIS->layerExist("vol_"+ft)){
2083  rpx[z]->changeValue("vol_"+ft, vol);
2084  }
2085  if(MTHREAD->GIS->layerExist("expectedReturns_"+ft)){
2086  rpx[z]->changeValue("expectedReturns_"+ft, expectedReturns);
2087  }
2088  }
2089  }
2090 }
2091 
2092 // update GUI image..
2093 for(int j=0;j<nFTypes;j++){
2094  string ft = fTypes[j];
2095  MTHREAD->GIS->updateImage("vol_"+ft);
2096  MTHREAD->GIS->updateImage("expectedReturns_"+ft);
2097 }
2098 
2099 
2100 }
2101 
2102 
2103 void
// NOTE(review): the defining signature line (listing line 2104) is missing from this
// extraction. From the visible body, this routine aggregates pixel-level results to
// the regional level at the end of a simulated year:
// - per (region, ft, dc): vol, hV, harvestedArea, hProductivityByDc, vMort and the
//   harvesting rate hr = hV / previous-year vol;
// - per (region, ft): hProductivity, vReg, regArea and forArea;
// - detailed harvested volumes by primary product (hVol_byPrd), if outDetailedHv;
// - expected returns (overall, by parent forest type and by forest type), weighted on
//   the TOTAL pixel forest area (see the 25/08/2016 note below);
// - optionally, the regional share of pathogen-invaded forest area.
2105 
2106  msgOut(MSG_INFO, "Summing data pixels->region..");
2107  //vector <string> outForVariables = MTHREAD->MD->getStringVectorSetting("outForVariables");
2108  int currentYear = MTHREAD->SCD->getYear();
2109 
2110  //map<vector<string>,double> hVol_byPrd; // by regid, ft, dc, pp
2111  map<tr1::array<string, 4>,double> hVol_byPrd; // by regid, ft, dc, pp
2112 
2113  // OLD CODE TO
2114  for(uint r2= 0; r2<regIds2.size();r2++){
2115  int regId = regIds2[r2];
2116  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels(); // by regId, ft, dc, pp
2117 
2118  // vector < vector <vector <double> > >hVol_byPrd; // by ft, dc, pp
2119 
2120  for(uint j=0;j<fTypes.size();j++){
2121  string ft = fTypes[j];
2122 
2123  double regArea = 0.;
2124  double sumAreaByFt = 0.;
2125  double pxForAreaByFt = 0.;
2126  double vReg = 0.;
2127  double sumSumHProductivity = 0.;
2128  double sumHArea = 0.;
2129 
2130  for (uint u=0; u<dClasses.size(); u++){
2131  string dc = dClasses[u];
2132  double vol =0.;
2133  double hV = 0.;
2134  double hArea = 0.;
2135  double vMort = 0.;
2136  double sumHProductivity = 0.; // productivity * area
2137  for (uint pi=0;pi<regPx.size();pi++){
2138  Pixel* px = regPx[pi];
2139  vol += px->vol.at(j).at(u);
2140  hV += px->hVol.at(j).at(u);
2141  hArea += px->hArea.at(j).at(u);
2142  vMort += px->vMort.at(j).at(u);
2143  sumHProductivity += (px->hProductivity.at(j).at(u) * px->hArea.at(j).at(u));
2144  if(MD->getBoolSetting("outDetailedHv",DATA_NOW)){
2145  for(uint pp=0;pp<priProducts.size();pp++){
2146  string pr = priProducts[pp];
2147  // vector <string> hVKey = {i2s(regId), ft, dc, pr};
2148  tr1::array<string, 4> hVKey = {i2s(regId), ft, dc, pr};
2149  //double debug = px->hVol_byPrd.at(j).at(u).at(pp);
2150  //cout << px->getID() << '\t' << j << '\t' << u << '\t' << pp << '\t' << debug << endl;
2151  incrOrAddMapValue(hVol_byPrd, hVKey, px->hVol_byPrd.at(j).at(u).at(pp));
2152  }
2153  }
2154  }
2155  if(u){ // u==0 (first diameter class) is skipped: regional dc-level stats are stored for dc >= 1 only
2156  sfd(vol,"vol",regId,ft,dc,DATA_NOW);
2157  sfd(hV,"hV",regId,ft,dc,DATA_NOW,true);
2158  double hProductivityByDc = hArea ? sumHProductivity/hArea : 0.;
2159  sumSumHProductivity += (hProductivityByDc*hArea);
2160  sumHArea += hArea;
2161  sfd(hProductivityByDc,"hProductivityByDc",regId,ft,dc,DATA_NOW,true);
2162  sfd(hArea,"harvestedArea",regId,ft,dc,DATA_NOW, true);
2163  sfd(vMort,"vMort",regId,ft,dc,DATA_NOW,true);
2164  double vol_1 = gfd("vol",regId,ft,dc,currentYear-1);
2165  if(vol_1){ // harvesting rate uses PREVIOUS-year volume as denominator; 0 when there was no volume
2166  sfd(hV/vol_1,"hr",regId,ft,dc,DATA_NOW, true);
2167  } else {
2168  sfd(0.,"hr",regId,ft,dc,DATA_NOW, true);
2169  }
2170 
2171  }
2172  } // end foreach dc
2173  double hProductivity = sumHArea? sumSumHProductivity / sumHArea : 0.;
2174  sfd(hProductivity,"hProductivity",regId,ft,"",DATA_NOW,true);
2175 
2176  for (uint p=0;p<regPx.size();p++){
2177  Pixel* px = regPx[p];
2178  vReg += px->vReg.at(j);
2179  regArea += findMap(px->regArea,currentYear).at(j);
2180  pxForAreaByFt = (px->getDoubleValue("forArea_"+ft,true)/10000); // layer stores m^2; model works in ha
2181 
2182  sumAreaByFt += pxForAreaByFt;
2183  //double debug1 = sumAreaByFt;
2184  if(! (sumAreaByFt >= 0.0) ){ // negated >= also traps NaN, not only negative values
2185  msgOut(MSG_CRITICAL_ERROR,"sumAreaByFt is not non negative.");
2186  }
2187  }
2188  sfd(vReg,"vReg",regId,ft,"",DATA_NOW, true);
2189  sfd(regArea,"regArea",regId,ft,"",DATA_NOW, true);
2190  sfd(sumAreaByFt,"forArea",regId,ft,"",DATA_NOW, true);
2191  } // end of for each ft
2192 
2193  for (uint p=0;p<regPx.size();p++){
2194  Pixel* px = regPx[p];
2195  double totPxForArea = vSum(px->area);
2196 
2197 #ifdef QT_DEBUG
2198  // consistency check: total forest area from the pixel vector must match the sum of the forArea_<ft> layers
2199  double totPxForArea_debug = 0.0;
2200  for(uint j=0;j<fTypes.size();j++){
2201  string ft = fTypes[j];
2202  totPxForArea_debug += (px->getDoubleValue("forArea_"+ft,true)/10000);
2203  }
2204 
2205  if ( (totPxForArea - totPxForArea_debug) > 0.0001 || (totPxForArea - totPxForArea_debug) < -0.0001 ){
2206  cout << "*** ERROR: area discrepance in pixel " << px->getID() << " of " << (totPxForArea - totPxForArea_debug) << " ha!" << endl;
2207  msgOut(MSG_CRITICAL_ERROR,"Total forest area in pixel do not coincide if token from layer forArea or (pixel) vector area!");
2208  }
2209 #endif
2210  } // end of each pixel
2211 
2212 
2213  } // end each region
2214  //cout << "done" << endl;
2215  MTHREAD->DO->printDetailedHV(hVol_byPrd);
2216 
2217 
2218  // Taking care of expected returns here..
2219  // (Changed 25/08/2016 afternoon: expRet{ft,r} are now sum{px}{expRet{ft,px}*fArea_{px}}/fArea{r} and no longer sum{px}{expRet{ft,px}*fArea_{px,ft}}/fArea{r,ft} )
2220  // Also now we report the expReturns by group and by forest, each of which is made only with the best ones within their group
2221 
2222  vector<string> parentFtypes = MTHREAD->MD->getForTypeParents();
2223 
2224  for(uint r2= 0; r2<regIds2.size();r2++){
2225  int regId = regIds2[r2];
2226  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
2227  double totRegionForArea = 0.;
2228  double totSumExpRet = 0.;
2229  vector <double> totSumExpRet_byFTParent(parentFtypes.size(),0.0);
2230  vector <double> totSumExpRet_byFTypes(fTypes.size(),0.0);
2231 
2232  // First computing the sumExpectedReturns..
2233  for (uint p=0;p<regPx.size();p++){
2234  Pixel* px = regPx[p];
2235  //int debug_pxid = px->getID();
2236  double pxForArea = vSum(px->area); // TOTAL pixel forest area (all ft), per the 25/08/2016 change above
2237  totRegionForArea += pxForArea;
2238  double bestPxExpectedRet = getMax(px->expectedReturnsNotCorrByRa);
2239  for(uint i=0;i<parentFtypes.size();i++){
2240  vector <string> childIds = MTHREAD->MD->getForTypeChilds(parentFtypes[i]);
2241  vector <int> childPos = MTHREAD->MD->getForTypeChilds_pos(parentFtypes[i]);
2242  vector<double> pxExpReturnsByChilds(childPos.size(),0.0);
2243  for(uint j=0;j<childPos.size();j++){
2244  double pxExpReturn_singleFt = px->expectedReturns.at(childPos[j]);
2245  // Manual fix to not have the expected returns of ash within the general "broadL" expected returns.
2246  // To do: remove it after we work on the ash project.. I don't like manual fixes !!!
2247  pxExpReturnsByChilds.at(j) = (childIds.at(j) == "ash") ? 0.0 : pxExpReturn_singleFt;
2248  //pxExpReturnsByChilds.at(j) = pxExpReturn_singleFt;
2249  totSumExpRet_byFTypes.at(childPos[j]) += pxExpReturn_singleFt*pxForArea;
2250  } // end of each ft
2251  totSumExpRet_byFTParent[i] += getMax(pxExpReturnsByChilds)*pxForArea; // parent group takes the BEST child ft in each pixel
2252  } // end for each partentFt
2253  totSumExpRet += bestPxExpectedRet * pxForArea;
2254  } // end for each px
2255 
2256  // ..and now computing the expReturns and storing them
2257  for(uint i=0;i<parentFtypes.size();i++){
2258  vector <int> childPos = MTHREAD->MD->getForTypeChilds_pos(parentFtypes[i]);
2259  for(uint j=0;j<childPos.size();j++){
2260  //double debug1 = totSumExpRet_byFTypes.at(childPos[j])/totRegionForArea;
2261  sfd(totSumExpRet_byFTypes.at(childPos[j]),"sumExpReturns",regId,fTypes.at(childPos[j]),"",DATA_NOW, true);
2262  sfd(totSumExpRet_byFTypes.at(childPos[j])/totRegionForArea,"expReturns",regId,fTypes.at(childPos[j]),"",DATA_NOW, true);
2263  } // end of each ft
2264  //double debug2 = totSumExpRet_byFTParent.at(i)/totRegionForArea;
2265  sfd(totSumExpRet_byFTParent.at(i),"sumExpReturns",regId,parentFtypes[i],"",DATA_NOW, true);
2266  sfd(totSumExpRet_byFTParent.at(i)/totRegionForArea,"expReturns",regId,parentFtypes[i],"",DATA_NOW, true);
2267 
2268  } // end for each partentFt
2269  //double debug3 = totSumExpRet/totRegionForArea;
2270  sfd(totSumExpRet,"sumExpReturns",regId,"","",DATA_NOW, true);
2271  sfd(totSumExpRet/totRegionForArea,"expReturns",regId,"","",DATA_NOW, true);
2272 
2273  } // end for each region
2274 
2275  // Computing pathogens share of forest invasion
2276  if(MD->getBoolSetting("usePathogenModule")){
2277  for(uint r2= 0; r2<regIds2.size();r2++){
2278  int regId = regIds2[r2];
2279  regPx = MTHREAD->MD->getRegion(regId)->getMyPixels();
2280  double totalForArea = 0.0;
2281  double invadedArea = 0.0;
2282  for (uint p=0;p<regPx.size();p++){
2283  Pixel* px = regPx[p];
2284  int invaded = 0.0; // NOTE(review): declared int but assigned double literals (0.0/1.0); works, but plain 0/1 would be cleaner
2285  for(uint j=0;j<fTypes.size();j++){
2286  for (uint u=0; u<dClasses.size(); u++){
2287  if(px->getPathMortality(fTypes[j],dClasses[u]) > 0){
2288  invaded = 1.0; // any (ft,dc) with pathogen mortality marks the whole pixel as invaded
2289  }
2290  }
2291  }
2292  totalForArea += vSum(px->area);
2293  invadedArea += vSum(px->area)*invaded;
2294  }
2295  sfd(invadedArea/totalForArea,"totalShareInvadedArea",regId,"","",DATA_NOW, true);
2296  }
2297  } // end we are using path model
2298 
2299 
2300 
2301 
2302 }
2302 /**
2303  * This function calls registerHarvesting() (accounts for emissions from for. operations), registerDeathBiomass() (registers new stocks of death biomass),
2304  * registerProducts() (registers new stock of products) and registerTransports() (accounts for emissions from transportation).
2305  *
2306  * It passes to registerProducts():
2307  * - for primary products, the primary products exported out of the country, but not those exported to other regions or used in the region as
2308  * these are assumed to be totally transformed to secondary products;
2309  * - for secondary products, those produced in the region from locally or regionally imported primary product plus those secondary products
2310  * imported from other regions, less those exported to other regions. It doesn't include the secondary products imported from abroad the country.
2311  */
2312 void
// NOTE(review): the defining signature line (listing line 2313) is missing from this
// extraction. The body registers the year's carbon events with the carbon-balance
// module (MTHREAD->CBAL): death biomass and harvesting per (region, forest type),
// product stocks per region, and transport emissions per region pair, and finally
// triggers the end-of-life HWP energy-substitution accounting.
2314 
2315  //void registerHarvesting(const int & regId, const string & fType, const double & value); ///< register the harvesting of trees -> cumEmittedForOper
2316  //void registerDeathBiomass(const double &value, const int & regId, const string &fType);
2317  //void registerProducts(const double &value, const int & regId, const string &productName);
2318  //void registerTransports(const double &distQ, const int & regId);
2319 
2320  for(uint i=0;i<regIds2.size();i++){
2321  for(uint j=0;j<fTypes.size();j++){
2322  double deathBiomass = gfd("vMort",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
2323  double harvesting = gfd("hV",regIds2[i],fTypes[j],DIAM_ALL,DATA_NOW);
2324  MTHREAD->CBAL->registerDeathBiomass(deathBiomass, regIds2[i], fTypes[j]); // register new stock
2325  MTHREAD->CBAL->registerHarvesting(harvesting, regIds2[i], fTypes[j]); // account for emissions. Added 201500715: it also moves the extra biomass to the death biomass pool
2326  }
2327 
2328  for(uint p=0;p<priProducts.size();p++){
2329  // for the primary products we consider only the exports as the domestic consumption is entirely transformed in secondary products
2330  double int_exports = gpd("sa",regIds2[i],priProducts[p],DATA_NOW);
2331  MTHREAD->CBAL->registerProducts(int_exports, regIds2[i], priProducts[p]); // register new stock
2332  }
2333  for(uint p=0;p<secProducts.size();p++){
2334  // for the transformed product we skip those that are imported, hence derived from other forest systems
2335  // but we consider those coming from other regions
2336  double consumption = gpd("dl",regIds2[i],secProducts[p],DATA_NOW); // dl = sl + net regional imports
2337  MTHREAD->CBAL->registerProducts(consumption, regIds2[i], secProducts[p]); // register new stock
2338  }
2339 
2340  }
2341  // Transport emissions: for every origin region, cumulate distance * transported quantity over all destination regions and products.
2342  for (uint r1=0;r1<l2r.size();r1++){
2343  for (uint r2=0;r2<l2r[r1].size();r2++){
2344  int rfrom= l2r[r1][r2];
2345  double distQProd = 0.0;
2346  for (uint r3=0;r3<l2r[r1].size();r3++){
2347  int rto = l2r[r1][r3];
2348  double dist = gpd("dist",rfrom,"",DATA_NOW,i2s(rto)); //km
2349  for(uint p=0;p<allProducts.size();p++){
2350  distQProd += dist*gpd("rt",rfrom,allProducts[p],DATA_NOW,i2s(rto)); //km*Mm^3
2351  }
2352  }
2353  MTHREAD->CBAL->registerTransports(distQProd, rfrom);
2354  }
2355  }
2356  MTHREAD->CBAL->HWP_eol2energy(); // used to compute the energy substitution from hwp that reach the end of life and doesn't go to landfill. Previously the energy substitution was computed in registerProducts(), that is at the time when the product was produced.
2357 
2358 }
2358 
2359 /**
2360  * Compute the expectation weighted price based on the ratio of the international (world) price between the future and now.
2361  *
2362  * @param curLocPrice The local current price
2363  * @param worldCurPrice The world current price
2364  * @param worldFutPrice The world future price
2365  * @param sl Supply local
2366  * @param sa Supply abroad
2367  * @param expCoef The expectation coefficient for prices for the agent [0,1]
2368  * @return The expType-averaged local (or weighted) price
2369  */
2370 double
2371 ModelCoreSpatial::computeExpectedPrice(const double & curLocPrice, const double & worldCurPrice, const double & worldFutPrice, const double & sl, const double & sa, const double & expCoef){
2372  double fullExpWPrice = (curLocPrice*(worldFutPrice/worldCurPrice)*sl+worldFutPrice*sa)/(sa+sl);
2373  double curWPrice = (curLocPrice*sl+worldCurPrice*sa)/(sl+sa);
2374  return curWPrice * (1-expCoef) + fullExpWPrice * expCoef;
2375 }
2376 
2377 /**
2378  * It uses volumes from gis data to "move" volumes from one forest type to the other (when called with what="vol"). Then it moves areas
2379  * proportionally and, as dc0 volumes are not defined but area it is, compute, again proportionally, area in destination forest times for dc=0
2380  * It acts on the pix->vol, pix->area and pix->area_l vectors. It also creates/updates the px->values layer map for the area, but it doesn't cache the
2381  * results in forDataMap.
2382  *
2383  * It is called first with parameter what="vol" in initializePixelVolumes() and then with what="area" in initializePixelAreas().
2384  * As we need the original volumes in the area allocation, original_vols is set as a static variable.
2385  *
2386  */
2387 void
2389  if(!MD->getBoolSetting("useSpExplicitForestTypes")) return;
2390 
2391  int nFTypes = fTypes.size();
2392  int nDC = dClasses.size();
2393  int pxC = 0;
2394 
2395  for(uint ir=0;ir<regIds2.size();ir++){
2396  int r2 = regIds2[ir];
2397  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2398  regPx = REG->getMyPixels();
2399  pxC += regPx.size();
2400  }
2401 
2402  static vector<vector<vector<double>>> original_vols(pxC, vector<vector<double>>(nFTypes, vector<double>(nDC, 0.0))); // by px counter, ftype, dc
2403 
2404  if(what=="vol"){
2405  // first, before transfering volumes, saving the original ones..
2406  for(uint i=0;i<fTypes.size();i++){
2407  for (uint u=0; u<dClasses.size(); u++){
2408  int pxC_loc = 0;
2409  for(uint ir=0;ir<regIds2.size();ir++){
2410  int r2 = regIds2[ir];
2411  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2412  regPx = REG->getMyPixels();
2413  for (uint p=0;p<regPx.size();p++){
2414  Pixel* px = regPx[p];
2415  original_vols[pxC_loc][i][u] += px->vol[i][u];
2416  pxC_loc ++;
2417  }
2418  }
2419  }
2420  }
2421  for(uint i=0;i<fTypes.size();i++){
2422  string fti = fTypes[i];
2423  for(uint o=0;o<fTypes.size();o++){
2424  string fto = fTypes[o];
2425  for (uint u=1; u<dClasses.size(); u++){ // first diameter class volumes are computed from the model..
2426  string layerName = "spInput#vol#"+fto+"#"+fti+"#"+i2s(u);
2427  if (MTHREAD->GIS->layerExist(layerName)){
2428  for(uint ir=0;ir<regIds2.size();ir++){
2429  int r2 = regIds2[ir];
2430  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2431  regPx = REG->getMyPixels();
2432  for (uint p=0;p<regPx.size();p++){
2433  Pixel* px = regPx[p];
2434  double vol_transfer = min(px->getDoubleValue(layerName,true)/1000000,px->vol[i][u]) ; // Vol in the layer are in m^3, in the model in Mm^3
2435  px->vol[i][u] -= vol_transfer;
2436  px->vol[o][u] += vol_transfer;
2437  }
2438  }
2439  }
2440  }
2441  }
2442  }
2443  }
2444 
2445  if(what=="area"){
2446  /**
2447  Allocate area proportionally to volumes (see file test_proportional_computation_of_areas_from_volumes.ods)
2448  Example:
2449  FtIn FtOut Vtrasfer
2450  con ash 0.2
2451  brHf ash 0.1
2452  brCopp ash 0.3
2453  con oak 0.3
2454  brHf oak 0.2
2455  brCopp oak 0.1
2456 
2457  Vorig Aorig Vnew Anew
2458  con 10 30 9.5 28.5 Aorig-Aorig*(Vtrasfer1/Vorig)-Aorig(Vtrasfer2/Vorig)
2459  brHf 5 20 4.7 18.8
2460  brCopp 2 20 1.6 16
2461  ash 0 0 0.6 4 Aorig1*Vtrasfer1/(Vorig1)+Aorig2*Vtrasfer2/(Vorig2)+...
2462  oak 0 0 0.6 2.7
2463  70 70
2464  */
2465  // first, before transfering areas, saving the original ones (we already saved the vols in the what="vol" section, that is called before this one)..
2466  vector<vector<vector<double>>> original_areas(pxC, vector<vector<double>>(nFTypes, vector<double>(nDC, 0.0))); // by px counter, ftype, dc
2467  for(uint i=0;i<fTypes.size();i++){
2468  for (uint u=0; u<dClasses.size(); u++){
2469  int pxC_loc = 0;
2470  for(uint ir=0;ir<regIds2.size();ir++){
2471  int r2 = regIds2[ir];
2472  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2473  regPx = REG->getMyPixels();
2474  for (uint p=0;p<regPx.size();p++){
2475  Pixel* px = regPx[p];
2476  original_areas[pxC_loc][i][u] += px->area_l[i][u];
2477  pxC_loc ++;
2478  }
2479  }
2480  }
2481  }
2482 
2483 
2484  // transferred areas ordered by pxcounter, i and then o ftype. Used to then repart the 0 diameter class..
2485  vector<vector<vector<double>>> transferred_areas(pxC, vector<vector<double>>(nFTypes, vector<double>(nFTypes, 0.0))); // initialize a 3d vector of nFTypes zeros.
2486 
2487  for(uint i=0;i<fTypes.size();i++){
2488  string fti = fTypes[i];
2489  for(uint o=0;o<fTypes.size();o++){
2490  string fto = fTypes[o];
2491  for (uint u=1; u<dClasses.size(); u++){ // first diameter class area is comuted proportionally..
2492  string layerName = "spInput#vol#"+fto+"#"+fti+"#"+i2s(u);
2493  if (MTHREAD->GIS->layerExist(layerName)){
2494  int pxC_loc = 0;
2495  for(uint ir=0;ir<regIds2.size();ir++){
2496  int r2 = regIds2[ir];
2497  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2498  regPx = REG->getMyPixels();
2499  for (uint p=0;p<regPx.size();p++){
2500  Pixel* px = regPx[p];
2501  double vol_i_orig = original_vols[pxC_loc][i][u];
2502  double vol_transfer = vol_i_orig?px->getDoubleValue(layerName,true)/1000000:0.0; // Vol in the layer are in m^3, in the model in Mm^3
2503  double area_i_orig = original_areas[pxC_loc][i][u];
2504  double area_transfer = vol_i_orig?area_i_orig*vol_transfer/vol_i_orig:0.0;
2505  px->area_l[i][u] -= area_transfer;
2506  px->area[i][u] = px->area_l[i][u];
2507  px->area_l[o][u] += area_transfer;
2508  px->area[o][u] = px->area_l[o][u];
2509  transferred_areas[pxC_loc][i][o] += area_transfer;
2510  pxC_loc ++;
2511  }
2512  }
2513  }
2514  }
2515  }
2516  }
2517 
2518  // Moving the area in the 0 diameter class, for which no info is normally available..
2519  double pxC_loc = 0;
2520  for(uint ir=0;ir<regIds2.size();ir++){
2521  int r2 = regIds2[ir];
2522  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2523  regPx = REG->getMyPixels();
2524  for (uint p=0;p<regPx.size();p++){
2525  Pixel* px = regPx[p];
2526  for(uint i=0;i<fTypes.size();i++){
2527  for(uint o=0;o<fTypes.size();o++){
2528  double area_i_orig = 0.0;
2529  for (uint u=1; u<dClasses.size(); u++){ // we want to skip the 0 diameter class, this is why we don't simply use vSum()..
2530  area_i_orig += original_areas[pxC_loc][i][u];
2531  }
2532  double area_transfer_u0 = area_i_orig?original_areas[pxC_loc][i][0]*(transferred_areas[pxC_loc][i][o]/area_i_orig):0.0;
2533  px->area_l[i][0] -= area_transfer_u0 ;
2534  px->area[i][0] = px->area_l[i][0];
2535  px->area_l[o][0] += area_transfer_u0 ; // bug corrected 20151130: it was 0 instead of o (output) !!
2536  px->area[o][0] = px->area_l[0][0]; // bug corrected 20151130: it was 0 instead of o (output) !!
2537  }
2538  }
2539  pxC_loc++;
2540  }
2541  }
2542 
2543  // Alligning the area memorised in the px layers to the new areas of the ft..
2544  for(uint i=0;i<fTypes.size();i++){
2545  string fti_id = fTypes[i];
2546  forType* fti = MTHREAD->MD->getForType(fti_id);
2547  int ft_memType = fti->memType;
2548  string ft_layerName = fti->forLayer;
2549  //if(ft_memType==3){
2550  // MTHREAD->GIS->addLayer(ft_layerName,ft_layerName,false,true); //20151130: no needed as we already added it in applyForestReclassification (yes, odd, as memory type 3 layerrs do not have any reclassification rule associated, but if I don't add the layer at that time I got other errors)
2551  // }
2552  if(ft_memType==3 ||ft_memType==2){
2553  for(uint ir=0;ir<regIds2.size();ir++){
2554  int r2 = regIds2[ir];
2555  ModelRegion* REG = MTHREAD->MD->getRegion(r2);
2556  regPx = REG->getMyPixels();
2557  for (uint p=0;p<regPx.size();p++){
2558  Pixel* px = regPx[p];
2559  double area_px = vSum(px->area[i]);
2560  px->changeValue(ft_layerName,area_px*10000);
2561  }
2562  }
2563  }
2564  }
2565  } // end if what is area
2566 }
2567 
2568 void
// NOTE(review): the defining signature line (listing line 2569) is missing from this
// extraction. Debug-only helper: dumps the initial (second model year) regional
// inventory ("in"), total supply ("st"), local supply ("sl") and abroad supply ("sa")
// for every primary product and level-2 region, then terminates the program.
2570  // Print debug stats on inventory and supplies in each region..
2571  cout << "Printing debug information on initial regional inventories and supplies.." << endl;
2572  cout << "Reg\tProduct\t\tInv\tSt\tSl\tSa" << endl; // FIX(review): header listed "Sa\tSl" but the rows below print sl BEFORE sa — columns were mislabelled
2573  for(uint r1=0;r1<l2r.size();r1++){
2574  for(uint r2c=0;r2c<l2r[r1].size();r2c++){
2575  for(uint p=0;p<priProducts.size();p++){
2576  int r2 = l2r[r1][r2c];
2577  double inv = gpd("in",r2,priProducts[p],secondYear);
2578  double st = gpd("st",r2,priProducts[p],secondYear);
2579  double sl = gpd("sl",r2,priProducts[p],secondYear);
2580  double sa = gpd("sa",r2,priProducts[p],secondYear);
2581  cout << r2 << "\t" << priProducts[p] << "\t\t" << inv << "\t" << st << "\t" << sl << "\t" << sa << endl;
2582  }
2583  }
2584  } // end of r1 region
2585  exit(0); // intentional hard stop: this routine exists only to inspect the initial state
2586 
2587 }
2588 
2589 /**
2590  * @brief ModelCoreSpatial::allocateHarvesting
2591  * @param total_st vector of total supply by primary products
2592  * @return a vector of the remaining supply that goes allocated to alive timber (that is, to harvesting)
2593  *
2594  * The algorithm is such that it loops the deathTimberInventory map for each year (newer to older), dc (higher to smaller) and ft.
2595  * It computes the primary products allocable from that combination and allocates the cell amount to decrease the total_st of those products
2596  * in a proportional way to what still remain of the allocable products.
2597  *
2598  * It is called in the runMarketModule() function.
2599  *
2600  */
2601 
2602 vector <double>
2603 ModelCoreSpatial::allocateHarvesting(vector<double> total_st, const int &regId){
2604  if(!MD->getBoolSetting("useDeathTimber",DATA_NOW)) return total_st;
2605  //vector <double> st_fromHarv(priProducts.size(),0.);
2606  //map<iisskey, double > * deathTimberInventory= MD->getDeathTimberInventory();
2607  int maxYears = MD->getMaxYearUsableDeathTimber();
2608  int currentYear = MTHREAD->SCD->getYear();
2609  for(uint y = currentYear-1; y>currentYear-1-maxYears; y--){
2610  for (int u = dClasses.size()-1; u>=0; u--){ // I need to specify u as an integer !
2611  string dc = dClasses.at(u);
2612  for (uint f=0; f<fTypes.size(); f++){
2613  string ft = fTypes[f];
2614  vector<int>allocableProducts = MD->getAllocableProductIdsFromDeathTimber(regId, ft, dc, y, currentYear-1);
2615  iisskey key(y,regId,ft,dc);
2616  double deathTimber = MD->deathTimberInventory_get(key);
2617  double sum_total_st_allocable = 0;
2618  // Computing shares/weights or remaining st to allcate
2619  for(uint ap=0;ap<allocableProducts.size();ap++){
2620  sum_total_st_allocable += total_st.at(allocableProducts[ap]);
2621  }
2622  for(uint ap=0;ap<allocableProducts.size();ap++){
2623  double allocableShare = sum_total_st_allocable?total_st.at(allocableProducts[ap])/sum_total_st_allocable:0.0;
2624  double allocated = min(total_st[allocableProducts[ap]],deathTimber*allocableShare);
2625  MD->deathTimberInventory_incrOrAdd(key,-allocated);
2626  total_st[allocableProducts[ap]] -= allocated;
2627  }
2628  }
2629  }
2630  }
2631  return total_st;
2632 }
2633 
2634 
2635 /**
2636  * This function has two tasks:
2637  * 1) compute the policy balances (- -> public cost, subsidy; + -> public revenue, tax) for pol_mktDirInt_s, pol_mktDirInt_d, pol_trSub and pol_tcSub
2638  * (pol_fiSub are done directly in the runManagementModule() function)
2639  * 2) compute the producer/consumer surpluses
2640  */
2641 void
2643  //int thisYear = MTHREAD->SCD->getYear();
2644  int previousYear = MTHREAD->SCD->getYear()-1;
2645 
2646  for(uint r2=0; r2<regIds2.size();r2++){
2647  int regId = regIds2[r2];
2648  ModelRegion* reg = MD->getRegion(regId);
2649 
2650  for(uint p=0;p<priProducts.size();p++){
2651  double pol_mktDirInt_s = gpd("pol_mktDirInt_s",regId,priProducts[p]);
2652  double pol_mktDirInt_s_1 = gpd("pol_mktDirInt_s",regId,priProducts[p],previousYear);
2653  double sigma = gpd("sigma",regId,priProducts[p]);
2654  double in = gpd("in",regId,priProducts[p]);
2655  double in_1 = gpd("in",regId,priProducts[p], previousYear);
2656  double pc = gpd("pc",regId,priProducts[p]);
2657  double pc_1 = gpd("pc",regId,priProducts[p], previousYear);
2658  double sc = gpd("sc",regId,priProducts[p]);
2659  double sc_1 = gpd("sc",regId,priProducts[p],previousYear);
2660  double gamma_incr = gpd("gamma_incr",regId,priProducts[p]); // elast supply to increasing stocks
2661  double gamma_decr = gpd("gamma_decr",regId,priProducts[p]); // elast supply to decreasing stocks
2662 
2663  double gamma = in>in_1 ? gamma_incr: gamma_decr;
2664  double polBal_mktDirInt_s = pc * (1-pol_mktDirInt_s) * sc;
2665  double surplus_prod = pc * sc -
2666  pc_1
2667  * pol_mktDirInt_s_1
2668  * pow(pol_mktDirInt_s,-1)
2669  * pow(sc_1,-1/sigma)
2670  * pow(in_1/in,gamma/sigma)
2671  * (sigma/(sigma+1))
2672  * pow(sc,(sigma+1)/sigma);
2673 
2674  spd(polBal_mktDirInt_s,"polBal_mktDirInt_s",regId,priProducts[p],DATA_NOW,true); // M€
2675  spd(surplus_prod,"surplus_prod",regId,priProducts[p],DATA_NOW,true);
2676  }
2677  for(uint p=0;p<secProducts.size();p++){
2678  double pol_mktDirInt_d = gpd("pol_mktDirInt_d",regId,secProducts[p]);
2679  double pol_mktDirInt_d_1 = gpd("pol_mktDirInt_d",regId,secProducts[p],previousYear);
2680  double sigma = gpd("sigma",regId,secProducts[p]);
2681  double pol_trSub = gpd("pol_trSub",regId,secProducts[p]);
2682  double pc = gpd("pc",regId,secProducts[p]);
2683  double pc_1 = gpd("pc",regId,secProducts[p],previousYear);
2684  double dc = gpd("dc",regId,secProducts[p]);
2685  double dc_1 = gpd("dc",regId,secProducts[p],previousYear);
2686  double dc0 = gpd("dc",regId,secProducts[p],secondYear);
2687  double m = gpd("m",regId,secProducts[p]);
2688  double sl = gpd("sl",regId,secProducts[p]); // sl because dl include also the production from byproduct that has not subsides in the model
2689 
2690  double polBal_mktDirInt_d = pc * (pol_mktDirInt_d - 1) * dc;
2691  double polBal_trSub = m * (pol_trSub-1) * sl;
2692  double trs = 0.5; // 0.1: we consider 90 percent of the demand integral
2693  double surplus_cons = pc_1
2694  * pol_mktDirInt_d_1
2695  * pow(dc_1,-1/sigma)
2696  * pow(pol_mktDirInt_d,-1)
2697  * (sigma/(sigma+1))
2698  * (pow(dc,(sigma+1)/sigma) - pow(trs*dc0,(sigma+1)/sigma))
2699  - (dc - trs * dc0) * pc ;
2700 
2701  spd(polBal_mktDirInt_d,"polBal_mktDirInt_d",regId,secProducts[p],DATA_NOW,true); // M€
2702  spd(polBal_trSub,"polBal_trSub",regId,secProducts[p],DATA_NOW,true); // M€
2703  spd(surplus_cons,"surplus_cons",regId,secProducts[p],DATA_NOW,true); // M€
2704  }
2705 
2706  for(uint p=0;p<allProducts.size();p++){
2707  double pol_tcSub = gpd("pol_tcSub",regId,allProducts[p]);
2708  double polBal_tcSub = 0;
2709  vector<ModelRegion*> siblings = reg->getSiblings();
2710  for(uint rTo=0;rTo<siblings.size();rTo++){
2711  int regId2 = siblings[rTo]->getRegId();
2712  double ct = gpd("ct",regId,allProducts[p],DATA_NOW,i2s(regId2));
2713  double rt = gpd("rt",regId,allProducts[p],DATA_NOW,i2s(regId2));
2714  polBal_tcSub += ct*(pol_tcSub-1) * rt;
2715  }
2716  spd(polBal_tcSub,"polBal_tcSub",regId,allProducts[p],DATA_NOW,true); // M€
2717  }
2718 
2719  }
2720 
2721 }
2722 
2723 /**
2724  * Return the average age using cumTp vector
2725  */
2726 double
2728  if(dc == dClasses.size()-1) { return px->cumTp[ft][dc] * 1.2;}
2729  else {return (px->cumTp[ft][dc]+px->cumTp[ft][dc+1])/2; }
2730 }
2731 
2732 
2733 
2734 
2735 // added for carbon project
2736 /**
2737  * Return the Volume/ha in a forest after a given number of year after planting, for a specific forest type
2738  */
2739 double
2740 ModelCoreSpatial::getVHaByYear(const Pixel* px, const int& ft, const int& year, const double& extraBiomass_ratio) const {
2741  double vHa = 0;
2742  //int regId = px->getMyRegion()->getRegId();
2743  //double extraBiomass_ratio = gfd("extraBiomass_ratio",regId,fTypes[ft],"",DATA_NOW);
2744 
2745  for (int u = 0; u<dClasses.size(); u++){
2746  double cumTP_exp = px->cumTp_exp[ft][u];
2747  if (year>=cumTP_exp){
2748  vHa = px->vHa_exp[ft][u]*(1+extraBiomass_ratio);
2749  } else {
2750  return vHa;
2751  }
2752  }
2753  return vHa;
2754 }
void updateImage(string layerName_h)
Add one layer to the system.
Definition: Gis.cpp:700
vector< vector< double > > deltaArea
Definition: Pixel.h:108
void setSpModifier(const double &value, const int &ftindex)
Definition: Pixel.h:87
Class to provide a simple integer-integer-string-string key in std maps.
Definition: BaseClass.h:213
void advanceYear()
Definition: Scheduler.h:51
int getPos(const K &element, const vector< K > &v, const int &msgCode_h=MSG_CRITICAL_ERROR)
Definition: BaseClass.h:421
Print an ERROR message, but don&#39;t stop the model.
Definition: BaseClass.h:61
The required data is for the current year.
Definition: BaseClass.h:73
void runBiologicalModule()
computes hV, hArea and new vol at end of year
vector< int > getAllocableProductIdsFromDeathTimber(const int &regId_h, const string &ft, const string &dc, const int &harvesting_year, int request_year=DATA_NOW)
Returns the ids of the primary products that is possible to obtain using the timber recorded death in...
Definition: ModelData.cpp:2105
double getArea(const string &fType_h, const string &dClass_h)
Get area by ft and dc (from pixel->area matrix)
void initialiseDeathBiomassStocks(const vector< double > &deathByFt, const int &regId)
Initialises the stocks of death biomass for the avgLive_* years before the simulation starts...
Definition: Carbon.cpp:169
int getIntSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1105
vector< vector< int > > l2r
vector< vector< double > > area
Definition: Pixel.h:107
vector< string > dClasses
vector< string > getForTypeParents()
Definition: ModelData.cpp:122
vector< vector< double > > cumAlive_exp
This is the expected version of cumAlive, used for calculating profits.
Definition: Pixel.h:137
bool getBoolSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1117
void runManagementModule()
computes regArea and expectedReturns
Do not actually output any message.
Definition: BaseClass.h:57
void initializePixelVolumes()
distribute regional exogenous volumes to pixel volumes using corine land cover area as weight ...
vector< vector< int > > createCombinationsVector(const int &nItems)
Return a vector containing any possible combination of nItems items (including any possible subset)...
Definition: ModelData.cpp:2055
vector< Pixel * > regPx
void sumRegionalForData()
computes vol, hV, harvestedArea, regArea and expReturns at reg level from the pixel level ...
Forest types (struct)
Definition: ModelData.h:293
void spd(const double &value_h, const string &type_h, const int &regId_h, const string &prodId_h, const int &year=DATA_NOW, const bool &allowCreate=false, const string &freeDim_h="") const
string i2s(const int &int_h) const
integer to string conversion
Definition: BaseClass.cpp:219
ThreadManager * MTHREAD
Pointer to the Thread manager.
Definition: BaseClass.h:467
K getMax(const vector< K > &v)
Returns the value of the maximum element in the vector (the last one in case of multiple equivalent m...
Definition: BaseClass.h:391
double getAvailableDeathTimber(const vector< string > &primProd_h, int regId_h, int year_h)
Returns the timber available for a given set of primary products as stored in the deathTimberInventor...
Definition: ModelData.cpp:2076
bool layerExist(const string &layerName_h, bool exactMatch=true) const
Return a pointer to a layer given its name.
Definition: Gis.cpp:552
void changeValue(const string &layerName_h, const double &value_h, const bool &setNoValueForZero=false)
Change the value of an existing layerMTHREAD->GIS->pack(parName, forName, dClass, year)...
Definition: Pixel.cpp:135
double getValue(string layerName, int op=OP_SUM)
return the values of its own pixels for the specified layer. Possible operations: OP_SUM or OP_AVG ...
double calculateAnnualisedEquivalent(const double &amount_h, const int &years_h, const double &ir) const
Calculate the annual equivalent flow.
Definition: ModelData.cpp:1961
ModelData * MD
the model data object
Definition: ThreadManager.h:72
void resetMapValues(map< K, V > &mymap, const V &value)
Reset all values stored in a map to the specified one.
Definition: BaseClass.h:341
void loadExogenousForestLayers(const string &what)
Set pixel volumes (what="vol") OR areas (what="area") by specific forest types as defined in gis laye...
int memType
Definition: ModelData.h:296
double expType
Sampling derived expectation types of this agent (forest bilogical parameters: growth, mortality)
Definition: Pixel.h:141
vector< double > avalCoef
Availability (of wood resources) coefficient. A [0,1] coefficient (new: by forest type) that reduces ...
Definition: Pixel.h:144
vector< string > priProducts
double getAvailableAliveTimber(const vector< string > &primProd_h, int regId_h)
Returns the timber available for a given set of primary products as stored in the px->vol_l vector...
Definition: ModelData.cpp:2124
vector< vector< double > > mort
Definition: Pixel.h:130
void printOptLog(bool optimal, int &nIterations, double &obj)
Definition: Output.cpp:829
Scheduler * SCD
the scheduler object (simulation-loops scheduler)
Definition: ThreadManager.h:75
vector< vector< double > > vMort
Definition: Pixel.h:118
vector< T > positionsToContent(const vector< T > &vector_h, const vector< int > &positions)
Return a vector of content from a vector and a vector of positions (int)
Definition: BaseClass.h:359
vector< vector< double > > vol_l
store the volumes of the previous year
Definition: Pixel.h:126
void msgOut(const int &msgCode_h, const string &msg_h, const bool &refreshGUI_h=true) const
Overloaded function to print the output log.
Definition: BaseClass.cpp:50
Gis * GIS
GIS information and methods.
Definition: ThreadManager.h:73
Thread manager. Responsible for managing the main thread and "speaking" with the GUI.
Definition: ThreadManager.h:65
void incrMapValue(map< K, V > &mymap, const K &key, const V &value, const int &error_level=MSG_CRITICAL_ERROR)
Increments a value stored in a map of the specified value, given the key.
Definition: BaseClass.h:312
vector< string > getStringVectorSetting(const string &name_h, int reg=WORLD) const
Definition: ModelData.cpp:1129
Perform a SUM operation.
Definition: BaseClass.h:77
void resetPixelValues()
swap volumes->lagged_volumes and reset the other pixel vectors
double deathTimberInventory_get(const iisskey &thekey)
Definition: ModelData.h:197
vector< vector< double > > vHa_exp
This is the expected version of vHa, used for calculating profits.
Definition: Pixel.h:136
vector< string > pDClasses
vector< double > initialDc0Area
Definition: Pixel.h:109
vector< vector< double > > cumAlive
Cumulative prob of remaining alive at beginnin of a given diam class.
Definition: Pixel.h:134
vector< int > getForTypeChilds_pos(const string &forTypeId_h, bool all=false)
Definition: ModelData.cpp:109
int getMaxPos(const vector< K > &v)
Returns the position of the maximum element in the vector (the last one in case of multiple equivalen...
Definition: BaseClass.h:383
void computeInventary()
in=f(vol_t-1)
vector< vector< double > > vHa
Volume at hectar by each diameter class [m^3/ha].
Definition: Pixel.h:133
vector< double > inResByAnyCombination
Vector of inventory resource for each possible combination of primary products. This store both alive...
Definition: ModelRegion.h:86
string getStringSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1113
void runMarketModule()
computes st (supply total) and pw (weighted price). Optimisation inside.
void registerTransports(const double &distQ, const int &regId)
Registers the quantities emitted by transport of wood FROM a given region.
Definition: Carbon.cpp:277
int getNForTypesChilds(const string &forTypeId_h)
Definition: ModelData.cpp:135
void cacheSettings()
just cache exogenous settings from ModelData
Print an error message and stop the model.
Definition: BaseClass.h:62
void registerDeathBiomass(const double &value, const int &regId, const string &fType)
Registers the "death" of a given amount of biomass, storing it in the deathBiomass map...
Definition: Carbon.cpp:243
double getSTData(const string &parName, const string &forName, int year, const string &d2="", double naToReturn=RETNA)
Definition: Pixel.cpp:246
Output * DO
data output
Definition: ThreadManager.h:76
vector< int > optDcChosen
Definition: Pixel.h:122
double getDoubleValue(const string &layerName_h, const bool &returnZeroForNoValue=false) const
Return the value for a specific layer.
Definition: Pixel.cpp:158
string d2s(const double &double_h) const
double to string conversion
Definition: BaseClass.cpp:229
const double getForData(const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW)
Definition: ModelData.cpp:1281
void updateOtherMapData()
update (if the layer exists) other gis-based data, as volumes and expected returns, taking them from the data in the px object
int vSum(const vector< int > &vector_h) const
Definition: BaseClass.h:276
bool getTempBool()
Definition: ModelData.h:145
int getYear()
Definition: Scheduler.h:49
void initialiseDeathTimber()
Set deathTimberInventory to zero for the previous years (under the hypothesis that we don&#39;t have advan...
double getPastRegArea(const int &ft_idx, const int &year)
Definition: Pixel.cpp:359
vector< string > fTypes
Pixel-level class.
Definition: Pixel.h:47
vector< vector< double > > hProductivity
Definition: Pixel.h:112
void initialiseProductsStocks(const vector< double > &qByProduct, const int &regId)
Initialises the stocks of products for the avgLive_* years before the simulation starts.
Definition: Carbon.cpp:203
void setTempBool(bool tempBool_h)
Definition: ModelData.h:146
vector< vector< double > > vol
Definition: Pixel.h:90
double getMultiplier(const string &multiplierName, const string &forName, int year=DATA_NOW)
Definition: Pixel.cpp:184
vector< string > allProducts
vector< string > secProducts
vector< vector< double > > cumTp_exp
This is the expected version of cumTp, used for calculating profits.
Definition: Pixel.h:135
double expTypePrices
Sampling derived expectation types of this agent (prices)
Definition: Pixel.h:142
void registerHarvesting(const double &value, const int &regId, const string &fType)
Registers the harvesting of trees increasing the value of cumEmittedForOper.
Definition: Carbon.cpp:225
Print a debug message, normally filtered out.
Definition: BaseClass.h:58
Ipopt::SmartPtr< Ipopt::TNLP > OPT
Market optimisation.
Definition: ThreadManager.h:81
Carbon * CBAL
Module for the Carbon Balance.
Definition: ThreadManager.h:79
vector< vector< vector< double > > > hVol_byPrd
Definition: Pixel.h:113
Print a WARNING message.
Definition: BaseClass.h:60
vector< vector< double > > area_l
store the areas of the previous year
Definition: Pixel.h:127
vector< Pixel * > getAllPlotsByRegion(ModelRegion &region_h, bool shuffle=false)
Return the vector of all plots by a specific region (main region or subregion), optionally shuffled;...
Definition: Gis.cpp:855
string getForTypeParentId(const string &forTypeId_h)
Definition: ModelData.cpp:90
vector< string > getForTypeIds(bool all=false)
By default it doesn&#39;t return forTypes used only as input.
Definition: ModelData.cpp:430
double getVHaByYear(const Pixel *px, const int &ft, const int &year, const double &extraBiomass_ratio) const
return the Volume/ha in a forest after a given number of year after planting, for a specific forest t...
double getSd(const vector< K > &v, bool sample=true)
Definition: BaseClass.h:408
double gpd(const string &type_h, const int &regId_h, const string &prodId_h, const int &year=DATA_NOW, const string &freeDim_h="") const
map< K, V > vectorToMap(const vector< K > &keys, const V &value=0.0)
Returns a map built using the given vector and the given (scalar) value as keys/values pairs...
Definition: BaseClass.h:349
vector< int > optFtChosen
Definition: Pixel.h:121
vector< int > optDc
Definition: Pixel.h:120
vector< double > expectedReturns
Definition: Pixel.h:119
V findMap(const map< K, V > &mymap, const K &key, const int &error_level=MSG_CRITICAL_ERROR, const V &notFoundValue=numeric_limits< V >::min()) const
Lookup a map for a value. Return the value starting from the key.
Definition: BaseClass.h:286
vector< vector< double > > tp
Definition: Pixel.h:131
double getPathMortality(const string &forType, const string &dC, int year=DATA_NOW)
Return the INCREASED mortality due to pathogen presence for a given ft and dc in a certain year (defa...
Definition: Pixel.cpp:298
ModelCoreSpatial(ThreadManager *MTHREAD_h)
double getAvgAgeByDc(Pixel *px, int ft, int dc)
return the average age of a tree at a given diameter class, using the cumTp vector ...
int getErrorLevel()
Definition: ModelData.h:144
void swap(const int &swap_what)
Assign to the delayed value the current values, e.g. vol_l = vol.
Definition: Pixel.cpp:391
string forLayer
Definition: ModelData.h:297
vector< ModelRegion * > getSiblings(bool excludeResidual=true)
Return a vector of pointers to the siblings regions.
Definition: ModelRegion.cpp:85
void printDetailedHV(map< tr1::array< string, 4 >, double > hVol_byPrd)
Definition: Output.cpp:1128
vector< double > expectedReturnsNotCorrByRa
by ft. Attenction, reported expReturns at "forest" level (compared with those at forest type level) d...
Definition: Pixel.h:124
vector< vector< double > > hVol
Definition: Pixel.h:111
void incrOrAddMapValue(map< K, V > &mymap, const K &key, const V &value)
Increments a value stored in a map of the specified value, given the key.
Definition: BaseClass.h:325
void initialiseEmissionCounters()
Initialises the emission counters to zero.
Definition: Carbon.cpp:158
void setErrorLevel(int errorLevel_h)
Definition: ModelData.h:143
void initMarketModule()
computes st and pw for second year and several needed-only-at-t0-vars for the market module ...
vector< int > getRegionIds(int level_h, bool excludeResidual=true)
Definition: ModelData.cpp:366
double computeExpectedPrice(const double &curLocPrice, const double &worldCurPrice, const double &worldFutPrice, const double &sl, const double &sa, const double &expCoef)
Compute weighted expected price for a given product.
void print(bool earlyPrint=true)
Print output. If earlyPrinting it doesn&#39;t print some stuff for which we don&#39;t yet have values...
Definition: Output.cpp:426
void HWP_eol2energy()
Computes the energy substitution for the quota of HWP that reachs end of life and doesn&#39;t go to landf...
Definition: Carbon.cpp:288
void initializePixelArea()
compute px->area for each ft and dc
vector< int > regIds2
double getDoubleSetting(const string &name_h, int position=0, int reg=WORLD) const
Definition: ModelData.cpp:1109
vector< vector< double > > cumTp
This is time of passage to REACH a diameter class (while the exogenous tp by diameter class is the ti...
Definition: Pixel.h:132
vector< vector< double > > hArea
Definition: Pixel.h:110
void sfd(const double &value_h, const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW, const bool &allowCreate=false) const
string getRegSName() const
Definition: ModelRegion.h:67
vector< double > vReg
Definition: Pixel.h:117
double gfd(const string &type_h, const int &regId_h, const string &forType_h, const string &freeDim_h, const int &year=DATA_NOW) const
bool app(const string &prod_h, const string &forType_h, const string &dClass_h) const
vector< vector< double > > beta
Definition: Pixel.h:129
vector< Pixel * > getMyPixels()
Definition: ModelRegion.h:86
Print an INFO message.
Definition: BaseClass.h:59
void assignSpMultiplierPropToVols()
ModelCoreSpatial::assignSpMultiplierPropToVols assigns the spatial multiplier (used in the time of re...
ModelRegion * getRegion(int regId_h)
Definition: ModelData.cpp:346
void cacheDynamicSettings()
cache settings that may change with time
void initialiseCarbonModule()
call initialiseDeathBiomassStocks(), initialiseProductsStocks() and initialiseEmissionCounters() ...
void cachePixelExogenousData()
computes pixel level tp, meta and mort
double getAvg(const vector< K > &v)
Returns the average of the elements in the vector.
Definition: BaseClass.h:399
#define DIAM_ALL
All diameter classes.
Definition: BaseClass.h:157
void updateMapAreas()
computes forArea_{ft}
void deathTimberInventory_incrOrAdd(const iisskey &thekey, double value_h)
Definition: ModelData.h:195
void registerCarbonEvents()
call registerHarvesting(), registerDeathBiomass(), registerProducts() and registerTransports() ...
vector< string > getForTypeChilds(const string &forTypeId_h)
Definition: ModelData.cpp:98
void computeCumulativeData()
computes cumTp_exp, vHa_exp, vHa
void registerProducts(const double &value, const int &regId, const string &productName)
Registers the production of a given amount of products, storing it in the products maps...
Definition: Carbon.cpp:257
double getID() const
Definition: Pixel.h:67
void printDebugInitRegionalValues()
print initial inv, st, sl and sa in each region
void computeEconomicBalances()
compute the policy balances (- -> costs; + -> rev) and the producer/consumer surpluses ...
forType * getForType(int position)
Definition: ModelData.h:128
map< int, vector< double > > regArea
Definition: Pixel.h:114
const int getMaxYearUsableDeathTimber(const string &prod_h, const string &forType_h, const string &dClass_h)
Definition: ModelData.cpp:468
vector< double > allocateHarvesting(vector< double > total_st, const int &regId)
Using the deathTimberInventory map, this function allocate the total st in st from death timber (that...