package ac.ed.lurg;

import ac.ed.lurg.carbon.CarbonFluxItem;
import ac.ed.lurg.carbon.CarbonFluxRasterSet;
import ac.ed.lurg.carbon.CarbonFluxReader;
import ac.ed.lurg.country.*;
import ac.ed.lurg.demand.*;
import ac.ed.lurg.forestry.ForestryDataOutputer;
import ac.ed.lurg.forestry.WoodYieldItem;
import ac.ed.lurg.forestry.WoodYieldRasterSet;
import ac.ed.lurg.forestry.WoodYieldReader;
import ac.ed.lurg.landuse.*;
import ac.ed.lurg.output.LandUseOutputer;
import ac.ed.lurg.output.LpjgOutputer;
import ac.ed.lurg.types.*;
import ac.ed.lurg.utils.FileWriterHelper;
import ac.ed.lurg.utils.LogWriter;
import ac.ed.lurg.yield.LPJYieldResponseMapReader;
import ac.ed.lurg.yield.YieldRaster;
import ac.ed.lurg.yield.YieldResponsesItem;
import ac.sac.raster.*;
import java.io.*;
import java.lang.reflect.Field;
import java.util.*;
import java.util.Map.Entry;

public class ModelMain {

private RasterHeaderDetails desiredProjection;
private CountryAgentManager countryAgents;
private CountryBoundaryRaster countryBoundaryRaster;
private AbstractDemandManager demandManager;
private AnimalRateManager animalRateManager;
private LPJYieldResponseMapReader lpjYieldReader;
private YieldRaster yieldSurfaces;
private InternationalMarket internationalMarket;
private IrrigationRasterSet currentIrrigationData;
private RasterSet<LandUseItem> globalLandUseRaster;
private RasterSet<IntegerRasterItem> clusterIdRaster;
private WoodYieldReader woodYieldReader;
private WoodYieldRasterSet woodYieldData;
private CarbonFluxReader carbonFluxReader;
private CarbonFluxRasterSet carbonFluxData;
public static void main(String[] args) {
ModelMain theModel = new ModelMain();

System.out.println("Working Directory = " + System.getProperty("user.dir"));
theModel.setup();
theModel.run();
}
/* setup models, reading inputs, etc. */
private void setup() {
desiredProjection = RasterHeaderDetails.getGlobalHeaderFromCellSize(ModelConfig.CELL_SIZE_X, ModelConfig.CELL_SIZE_Y, "999");
BaseConsumpManager baseConsumpManager = new BaseConsumpManager();
CalorieManager calorieManager = new CalorieManager();
lpjYieldReader = new LPJYieldResponseMapReader(desiredProjection);
if (ModelConfig.DEMAND_FROM_FILE)
demandManager = new DemandManagerFromFile(calorieManager);
else if (ModelConfig.PRICE_ELASTIC_DEMAND)
demandManager = getElasticDemandManager(baseConsumpManager, calorieManager);
else
demandManager = new DemandManagerSSP(ModelConfig.SSP_SCENARIO, baseConsumpManager, calorieManager);
countryBoundaryRaster = getCountryBoundaryRaster();
clusterIdRaster = ModelConfig.GENERATE_NEW_YIELD_CLUSTERS ? new RasterSet<IntegerRasterItem>(desiredProjection) : getClusterRaster();
globalLandUseRaster = new RasterSet<LandUseItem>(desiredProjection);
currentIrrigationData = getFixedIrrigationData();
internationalMarket = new InternationalMarket();
woodYieldReader = new WoodYieldReader(desiredProjection);
carbonFluxReader = new CarbonFluxReader(desiredProjection);
if (ModelConfig.IS_CALIBRATION_RUN)
saveConfig();
createCountryAgents(CountryManager.getInstance().getAllCompositeCountries());
}

private void run() {
for (int i = ModelConfig.START_TIMESTEP; i <= ModelConfig.END_TIMESTEP; i++) {
Timestep timestep = new Timestep(i);
doTimestep(timestep);
LpjgOutputer.writeMarkerFile(timestep.getYear(), true);
}
}
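/** Run a single timestep: refresh input rasters, solve production and land use for each country, iterate international trade and demand re-estimation, then save a checkpoint if required. */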
private void doTimestep(Timestep timestep) {
LogWriter.println("Timestep: " + timestep.toString());
ModelConfig.getModelConfig().setTimestep(timestep);
if (ModelConfig.IS_CALIBRATION_RUN && timestep.isInitialTimestep()) { // initialize trade and production
internationalMarket.determineInternationalTrade(countryAgents.getAll(), timestep);
}
yieldSurfaces = getYieldSurfaces(timestep); // this will wait for the marker file from LPJ if configured to do so
getUpdateIrrigationData(timestep); // updating currentIrrigationData
// When running half earth we may need to alter protected areas data at a point in time
if (ModelConfig.HALFEARTH && ModelConfig.FORCE_PROTECTED_AREAS_START_YEAR == timestep.getYear() && !ModelConfig.IS_CALIBRATION_RUN) {
new ProtectedAreasReader(globalLandUseRaster).getRasterDataFromFile(ModelConfig.HALF_EARTH_FILE);
countryAgents.updateProtectedAreasForAll(globalLandUseRaster);
}
if (ModelConfig.FORCE_LCC) {
forceLandCoverChanges(timestep);
}
getWoodYieldData(timestep);
getCarbonFluxData(timestep);
LogWriter.println("Memory usage 1: " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / (1024.0*1024.0*1024.0));
ConversionCostReader conCostReader = new ConversionCostReader(timestep);
Map<LccKey, Double> conversionCosts = conCostReader.getConversionCosts();
handleMissingData();
countryAgents.determineProductionForAll(timestep, yieldSurfaces, currentIrrigationData, carbonFluxData, woodYieldData, conversionCosts);
if (ModelConfig.RESET_STOCK_YEAR == timestep.getYear())
internationalMarket.resetStocks();
internationalMarket.determineInternationalTrade(countryAgents.getAll(), timestep);
int i = 0;
while (i < ModelConfig.DEMAND_RECALC_MAX_ITERATIONS ||
(ModelConfig.DEMAND_RECALC_ON_NEGATIVE_STOCK && internationalMarket.negativeStockLevelsExist() && i < 10)) { // keep looping if negative stocks exist and we haven't tried 10 times already
LogWriter.println("\n++ Re-estimating prices and demand: timestep " + timestep.getTimestep() + ": iteration " + i);
countryAgents.recalculateDemandAndNetImportsForAll(); // recalculate demand from new prices and calculate imports and exports
internationalMarket.determineInternationalTrade(countryAgents.getAll(), timestep); // calculate prices
i++;
}
LogWriter.println("Memory usage 2: " + (Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory()) / (1024.0*1024.0*1024.0));

outputTimestepResults(timestep);
checkAndSaveCheckpoint(timestep);
}

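/** Serialize a checkpoint when a configured checkpoint year is reached, or during calibration runs (optionally only at the final timestep). */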
private void checkAndSaveCheckpoint(Timestep timestep) {
if (shouldSaveCheckpoint(timestep)
|| (ModelConfig.SERIALIZE_FINAL_TIMESTEP_ONLY && ModelConfig.IS_CALIBRATION_RUN
&& timestep.getTimestep() == ModelConfig.END_TIMESTEP)
|| (!ModelConfig.SERIALIZE_FINAL_TIMESTEP_ONLY && ModelConfig.IS_CALIBRATION_RUN)) {
serializeCheckpoint();
}
}
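/** True if the timestep year is one of those listed in ModelConfig.CHECKPOINT_YEARS. */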
private boolean shouldSaveCheckpoint(Timestep timestep) {

if (ModelConfig.CHECKPOINT_YEARS != null) {
LogWriter.println("Looking to see if checkpoint year reached " + ModelConfig.CHECKPOINT_YEARS);
String[] yearStr = ModelConfig.CHECKPOINT_YEARS.split(",");
for(int i=0; i<yearStr.length; i++) {
LogWriter.println("Got a checkpoint yearStr " + yearStr[i]);
int year = Integer.parseInt(yearStr[i]);
if (timestep.getYear() == year)
return true;

}
}
return false;
}
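/** Write global land cover area totals, plus energy crop, fertiliser, irrigation and management intensity totals, for this timestep. */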
private void writeLandCoverFile(Timestep timestep, RasterSet<LandUseItem> landUseRaster) {
try {
StringBuffer sbHeadings = new StringBuffer("Year,Cropland,Pasture,TimberForest,CarbonForest,Natural,Suitable,EnergyCrop,FertCrop,IrrigCrop,ManIntCrop,ManIntPast");
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.LAND_COVER_OUTPUT_FILE, sbHeadings.toString());
StringBuffer sbData = new StringBuffer();
sbData.append(String.format("%d,%.1f,%.1f,%.1f,%.1f,%.1f,%.1f", timestep.getYear(),
LandUseItem.getTotalLandCover(landUseRaster.values(), LandCoverType.CROPLAND),
LandUseItem.getTotalLandCover(landUseRaster.values(), LandCoverType.PASTURE),
LandUseItem.getTotalLandCover(landUseRaster.values(), LandCoverType.TIMBER_FOREST),
LandUseItem.getTotalLandCover(landUseRaster.values(), LandCoverType.CARBON_FOREST),
LandUseItem.getTotalLandCover(landUseRaster.values(), LandCoverType.NATURAL),
LandUseItem.getSuitableTotal(landUseRaster.values(), timestep.getYear())));
sbData.append(String.format(",%.1f", LandUseItem.getTotalCropArea(landUseRaster.values(), CropType.ENERGY_CROPS)));
sbData.append(String.format(",%.1f", LandUseItem.getFertiliserTotal(landUseRaster.values(), CropType.getCropsLessPasture()) / 1000));
sbData.append(String.format(",%.1f", LandUseItem.getIrrigationTotal(landUseRaster.values(), CropType.getCropsLessPasture())));
sbData.append(String.format(",%.1f", LandUseItem.getManagementIntensityTotal(landUseRaster.values(), CropType.getCropsLessPasture())));
sbData.append(String.format(",%.1f", LandUseItem.getManagementIntensityTotal(landUseRaster.values(), CropType.PASTURE)));
outputFile.write(sbData.toString());
outputFile.newLine();
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
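/** Write world imports, exports, export prices and stock levels for each crop. */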
private void writeGlobalMarketFile(Timestep timestep) {
try {
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.PRICES_OUTPUT_FILE, "Year,Crop,Imports (Mt),Exports (Mt),New export price, Stock Levels (Mt)");
internationalMarket.writeGlobalMarketFile(timestep, outputFile);
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
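/** Write a global food balance sheet: production, trade, losses, stock change, feed and food use, and production area for each crop. */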
private void writeGlobalFoodBalanceSheet(Timestep timestep, RasterSet<LandUseItem> landUseRaster) {
try {
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.FOOD_BALANCE_SHEET_FILE, "Year,Crop,Production,Imports,Export,TransportLosses,StockVar,Supply,MonogastricsFeed,RuminantsFeed,SeedAndOtherLosses,FoodAnd1stGen,ProdArea");
Map<CropType, GlobalPrice> worldPrices = internationalMarket.getWorldPrices();
double harvestedAndFallowArea = 0;
for (CropType crop : CropType.getCropsLessPasture()) {
GlobalPrice priceQuantity = worldPrices.get(crop); // some specific logic for import/exports and this has been aggregated already, so best to use it
double prod=0, prodArea=0, feedMonogastrics=0, feedRuminants=0;
double exportsBeforeTL=0, imports=0, transportloss=0, stockChange=0;
if (priceQuantity != null) {
exportsBeforeTL = priceQuantity.getExportsBeforeTransportLoss();
imports = priceQuantity.getImportAmount();
transportloss = priceQuantity.getTransportLosses();
stockChange = priceQuantity.getStockChange();
}
for (AbstractCountryAgent ca : countryAgents.getAll()) {
Map<CropType, CropUsageData> allCropUsage = ca.getCropUsageData();
CropUsageData cropUsage = allCropUsage.get(crop);
if (cropUsage != null) {
prod += cropUsage.getProductionExpected();
prodArea += cropUsage.getArea();
feedMonogastrics += cropUsage.getMonogastricFeed();
feedRuminants += cropUsage.getRuminantFeed();
}
}
double seedAndWaste = prod * crop.getSeedAndWasteRate();
double netSupply = prod - exportsBeforeTL + imports;
double foodAnd1stGen = netSupply - feedMonogastrics - feedRuminants - seedAndWaste;
if (!crop.equals(CropType.SETASIDE))
prodArea *= (1-ModelConfig.UNHANDLED_CROP_RATE); // remove unhandled crop area
harvestedAndFallowArea += prodArea;
StringBuffer sbData = new StringBuffer();
sbData.append(String.format("%d,%s", timestep.getYear(), crop.getGamsName()));
sbData.append(String.format(",%.2f", prod));
sbData.append(String.format(",%.2f,%.2f,%.2f,%.2f", imports, exportsBeforeTL, transportloss, stockChange));
sbData.append(String.format(",%.2f,%.2f,%.2f,%.2f,%.2f", netSupply, feedMonogastrics, feedRuminants, seedAndWaste, foodAnd1stGen));
sbData.append(String.format(",%.2f", prodArea));
outputFile.write(sbData.toString());
outputFile.newLine();
}
double cropLandArea = LandUseItem.getTotalLandCover(landUseRaster.values(), LandCoverType.CROPLAND);
double unhandledArea = cropLandArea - harvestedAndFallowArea;
outputFile.write(String.format("%d,%s,,,,,,,,,,,%.2f", timestep.getYear(), "unhandled", unhandledArea));
outputFile.newLine();
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
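/** Write global demand by commodity, plus second-generation energy crop demand, for this timestep. */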
private void writeDemandFile(Timestep timestep) {
try {
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.DEMAND_OUTPUT_FILE, "Year,Commodity,Amount (Mt)");
for (CommodityType comm : CommodityType.getAllFoodItems()) {
double demandAmount = 0;
for (AbstractCountryAgent country : countryAgents.getAll()) {
Map<CommodityType, Double> demands = country.getCurrentProjectedDemand();
if (demands == null) {
LogWriter.printlnError(country.getCountry() + " " + comm);
continue;
}
Double d = demands.get(comm);
if (d != null)
demandAmount += d;
LogWriter.println(String.format("%s,%s,%.4f", country.getCountry(), comm.getGamsName(), d));
}
StringBuffer sbData = new StringBuffer();
sbData.append(String.format("%d,%s", timestep.getYear(), comm.getGamsName()));
sbData.append(String.format(",%.1f", demandAmount));
LogWriter.println("Global demand " + timestep.getYear() + " " + comm.getGamsName() + " " + demandAmount + "\n");
outputFile.write(sbData.toString());
outputFile.newLine();
}
double gen2EcDemand = countryAgents.getAll().stream().mapToDouble(c -> c.getCurrentGen2EcDemand()).sum();
outputFile.write(String.format("%d,%s,%.1f", timestep.getYear(), CropType.ENERGY_CROPS.getGamsName(), gen2EcDemand));
outputFile.newLine();
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
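/** Write per-country, per-crop production, costs, prices, net imports and feed amounts. */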
private void writeDomesticProductionFile(Timestep timestep) {
try {
StringBuffer sbHeadings = new StringBuffer("Year, Country, Crop, Area, Production, Production_cost, Import_price, Export_price, Consumer_price, Net_imports, Net_import_cost, Prod_shock, Rum_feed_amount, Mon_feed_amount");
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.DOMESTIC_OUTPUT_FILE, sbHeadings.toString());
for (AbstractCountryAgent country : countryAgents.getAll()) {
Map<CropType, CropUsageData> cropUsageAllCrops = country.getCropUsageData();
for (CropType crop : cropUsageAllCrops.keySet()) {
CropUsageData cropUsage = cropUsageAllCrops.get(crop);
if (cropUsage == null)
continue;
double prodCosts = cropUsage.getTotalProdCost();
double prod = cropUsage.getProductionExpected();
double prodShock = cropUsage.getProductionShock();
double area = cropUsage.getArea();
double rumFeedAmount = cropUsage.getRuminantFeed();
double monFeedAmount = cropUsage.getMonogastricFeed();
double importPrice = 0;
double exportPrice = 0;
double consumerPrice = 0;
double netImports = 0;
double netImportCost = 0;
CountryPrice px = country.getCurrentCountryPrices().get(crop);
importPrice = px.getImportPrice();
exportPrice = px.getExportPrice();
consumerPrice = px.getConsumerPrice();
netImports = cropUsage.getNetImportsExpected(); //this isn't accounting for transport losses in exports
netImportCost = cropUsage.getNetImportCostExpected();
StringBuffer sbData = new StringBuffer();
sbData.append(String.format("%d,%s,%s", timestep.getYear(), country.getCountry(), crop.getGamsName()));
sbData.append(String.format(",%.4f,%.4f,%.4f,%.4f,%.4f,%.4f,%.4f,%.4f,%.4f,%.4f,%.4f", area, prod, prodCosts, importPrice, exportPrice, consumerPrice, netImports, netImportCost, prodShock, rumFeedAmount, monFeedAmount));
outputFile.write(sbData.toString());
outputFile.newLine();
}
}
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
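/** Write per-country wood production, prices and net imports by wood type. */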
private void writeWoodProdFile(Timestep timestep) {
try {
StringBuffer sbHeadings = new StringBuffer("Year, Country, Item, Production, Import_price, Export_price, Net_imports");
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.WOOD_OUTPUT_FILE, sbHeadings.toString());
for (AbstractCountryAgent country : countryAgents.getAll()) {
Map<WoodType, WoodUsageData> woodUsageMap = country.getWoodUsageData();
for (WoodType woodType : WoodType.values()) {
WoodUsageData woodUsage = woodUsageMap.get(woodType);
if (woodUsage == null)
continue;
double prod = woodUsage.getProduction();
double netImports = woodUsage.getNetImport();
CountryPrice px = country.getCurrentCountryWoodPrices().get(woodType);
double importPrice = px.getImportPrice();
double exportPrice = px.getExportPrice();
StringBuffer sbData = new StringBuffer();
sbData.append(String.format("%d,%s,%s", timestep.getYear(), country.getCountry(), woodType.getName()));
sbData.append(String.format(",%.4f,%.4f,%.4f,%.4f", prod, importPrice, exportPrice, netImports));
outputFile.write(sbData.toString());
outputFile.newLine();
}
}
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
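/** Write per-country carbon credits, prices, net carbon imports and net flux. */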
private void writeCarbonFluxesFile(Timestep timestep) {
try {
StringBuffer sbHeadings = new StringBuffer("Year, Country, Credits, Import_price, Export_price, Net_imports, Net_flux");
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.CARBON_OUTPUT_FILE, sbHeadings.toString());
for (AbstractCountryAgent country : countryAgents.getAll()) {
CarbonUsageData carbonUsage = country.getCarbonUsageData();
if (carbonUsage == null)
continue;
double credits = carbonUsage.getCarbonCredits();
double netFlux = carbonUsage.getNetCarbonFlux();
double netImports = carbonUsage.getNetCarbonImport();
CountryPrice px = country.getCurrentCountryCarbonPrice();
double importPrice = px.getImportPrice();
double exportPrice = px.getExportPrice();
StringBuffer sbData = new StringBuffer();
sbData.append(String.format("%d,%s", timestep.getYear(), country.getCountry()));
sbData.append(String.format(",%.4f,%.4f,%.4f,%.4f,%.4f", credits, importPrice, exportPrice, netImports, netFlux));
outputFile.write(sbData.toString());
outputFile.newLine();
}
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
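/** Write per-country demand, first-generation bioenergy demand and consumer price for each food commodity. */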
private void writeCountryDemandFile(Timestep timestep){
try {
StringBuffer sbHeadings = new StringBuffer("Year, Country, Commodity, Demand, BioenergyDemand, ConsumerPrice");
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.COUNTRY_DEMAND_FILE, sbHeadings.toString());
for (AbstractCountryAgent country : countryAgents.getAll()) {
for (CommodityType commodity : CommodityType.getAllFoodItems()) {
double bioenergyDemand = demandManager.getFirstGenBioenergyDemand(country.getCountry(), timestep.getYear(), commodity);
double demand = country.getCurrentProjectedDemand().get(commodity);
double consumerPrice = country.getCurrentConsumerPrice(commodity);
StringBuffer sbData = new StringBuffer();
sbData.append(String.format("%d,%s,%s", timestep.getYear(), country.getCountry(), commodity.getGamsName()));
sbData.append(String.format(",%.4f,%.4f,%.4f", demand, bioenergyDemand, consumerPrice));
outputFile.write(sbData.toString());
outputFile.newLine();
}
}
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
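/** Write estimated animal numbers per country, derived from meat production and per-country animal rates. */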
private void writeAnimalNumber(Timestep timestep) {
try {
StringBuffer sbHeadings = new StringBuffer("Year,Country,FAOItem,Heads(M)");
BufferedWriter outputFile = FileWriterHelper.getFileWriter(timestep, ModelConfig.ANIMAL_NUMBERS_OUTPUT_FILE, sbHeadings.toString());
for (AbstractCountryAgent country : countryAgents.getAll()) {
Map<CropType, CropUsageData> cropUsageAllCrops = country.getCropUsageData();
for (CropType crop : CropType.getMeatTypes()) {
CropUsageData cropusage = cropUsageAllCrops.get(crop);
if (cropusage == null)
continue;
double prod = cropusage.getProductionExpected();
Map<String, Double> animalRates = animalRateManager.getAnimalRates(country.getCountry(), crop);
for (Entry<String, Double> entry : animalRates.entrySet()) {
StringBuffer sbData = new StringBuffer();
double animalNum = prod * entry.getValue();
sbData.append(String.format("%d,%s,%s,%.4f", timestep.getYear(), country.getCountry(), entry.getKey(), animalNum));
outputFile.write(sbData.toString());
outputFile.newLine();
}
}
}
outputFile.close();
} catch (IOException e) {
LogWriter.print(e);
}
}
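/** Write all tabular and raster outputs for the timestep, including LPJ-GUESS land use rasters when configured. */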
private void outputTimestepResults(Timestep timestep) {
writeLandCoverFile(timestep, globalLandUseRaster);
writeWoodProdFile(timestep);
writeCarbonFluxesFile(timestep);
writeCountryDemandFile(timestep);
writeGlobalFoodBalanceSheet(timestep, globalLandUseRaster);
writeAnimalNumber(timestep);
if (ModelConfig.OUTPUT_FOR_LPJG) {
for (int outputYear : timestep.getYearsFromLast()) {
LogWriter.println("Outputing Year: " + outputYear);
RasterSet<LandUseItem> landUseToOutput = null;
landUseToOutput = globalLandUseRaster;
if (landUseToOutput != null) {
LpjgOutputer lpjOutputer = new LpjgOutputer(outputYear, landUseToOutput);
lpjOutputer.writeOutput();
}
}
outputWaterAvailablity(timestep, currentIrrigationData); // uses the year directory structure created above
}
if (timestep.isInitialTimestep() && ModelConfig.GENERATE_NEW_YIELD_CLUSTERS)
outputClusters(clusterIdRaster);
// Output LandUses to tabular file, for analysis (perhaps)
LogWriter.println("Outputing land uses Year: " + timestep.getYear());
LandUseOutputer landuseOutputer = new LandUseOutputer(timestep.getYear(), globalLandUseRaster);
LogWriter.println("Outputing forestry data Year: " + timestep.getYear());
ForestryDataOutputer forestryOutputer = new ForestryDataOutputer(timestep.getYear(), globalLandUseRaster, woodYieldData);
forestryOutputer.writeOutput();
// don't really need this as LPJ outputs have the same data, although in a slightly different format
// outputLandCover(timestep.getYear(), landUseRaster, LandCoverType.CROPLAND);
// outputLandCover(timestep.getYear(), landUseRaster, LandCoverType.PASTURE);
}
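/** Write the irrigation constraint raster into this timestep's year output directory. */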
private void outputWaterAvailablity(Timestep timestep, IrrigationRasterSet irrigiationRS) {
new RasterOutputer<Double, IrrigationItem>(irrigiationRS, ModelConfig.OUTPUT_DIR + File.separator + timestep.getYear() + File.separator + "IrrigConstraint.asc") {
@Override
public Double getValue(RasterKey location) {
IrrigationItem item = results.get(location);
if (item == null)
return null;
return item.getIrrigConstraint();
}
}.writeOutput();
}
private void outputClusters(RasterSet<IntegerRasterItem> clusterRaster) {
new RasterOutputer<Integer, IntegerRasterItem>(clusterRaster, ModelConfig.CLUSTERED_YIELD_FILE) {
public Integer getValue(RasterKey location) {
IntegerRasterItem item = results.get(location);
return (item == null) ? null : item.getInt(); // getInt() assumed as the IntegerRasterItem accessor for the cluster id
}
}.writeOutput();
}
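/** Read the yield cluster id raster from ModelConfig.CLUSTERED_YIELD_FILE. */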
public RasterSet<IntegerRasterItem> getClusterRaster() {
RasterSet<IntegerRasterItem> clusters = new RasterSet<IntegerRasterItem>(desiredProjection) {
private static final long serialVersionUID = 2467452274591854417L;
@Override
protected IntegerRasterItem createRasterData() {
return new IntegerRasterItem(0);
}
};
IntegerRasterReader clusterReader = new IntegerRasterReader(clusters);
clusterReader.getRasterDataFromFile(ModelConfig.CLUSTERED_YIELD_FILE);
return clusters;
}
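/** Read the country boundary raster from ModelConfig.COUNTRY_BOUNDARY_FILE. */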
public CountryBoundaryRaster getCountryBoundaryRaster() {
CountryBoundaryRaster countryBoundaries = new CountryBoundaryRaster(desiredProjection);
CountryBoundaryReader countryReader = new CountryBoundaryReader(countryBoundaries);
countryReader.getRasterDataFromFile(ModelConfig.COUNTRY_BOUNDARY_FILE);
return countryBoundaries;
}
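/** Create a country agent for each composite country, initialised with crop, wood and carbon usage data and the initial land use. */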
public void createCountryAgents(Collection<CompositeCountry> countryGrouping) {
countryAgents = new CountryAgentManager(demandManager, countryBoundaryRaster, internationalMarket, clusterIdRaster, globalLandUseRaster);
Map<CompositeCountry, Map<CropType, CropUsageData>> cropUsageDataMap = getInitialCropUsageData();
Map<CompositeCountry, Map<WoodType, WoodUsageData>> woodUsageDataMap;
woodUsageDataMap = getInitialWoodUsageData();
Map<CompositeCountry, CarbonUsageData> carbonUsageDataMap = getInitialCarbonUsageData();
RasterSet<LandUseItem> initLU = getInitialLandUse();
for (CompositeCountry cc : countryGrouping) {
countryAgents.addForCountry(cc, cropUsageDataMap, initLU, woodUsageDataMap, carbonUsageDataMap);
}
}
private RasterSet<LandUseItem> getInitialLandUse() {
RasterSet<LandUseItem> initialLU;
initialLU = getLandUseFromBaseline();
initialLU = deserializeLandUse();
return initialLU;
R0slyn
committed
private Map<CompositeCountry, Map<CropType, CropUsageData>> getInitialCropUsageData() {
Map<CompositeCountry, Map<CropType, CropUsageData>> cropUsageDataMap;
if (ModelConfig.IS_CALIBRATION_RUN || ModelConfig.USE_INITIAL_CROP_USAGE_DATA)
cropUsageDataMap = new CropUsageReader().getCommodityData();
else
cropUsageDataMap = deserializeCropUsage();
return cropUsageDataMap;
}
private Map<CompositeCountry, Map<WoodType, WoodUsageData>> getInitialWoodUsageData() {
Map<CompositeCountry, Map<WoodType, WoodUsageData>> woodUsageDataMap;
if (ModelConfig.IS_CALIBRATION_RUN) {
woodUsageDataMap = demandManager.getInitialWoodUsage();
} else {
woodUsageDataMap = deserializeWoodUsage();
}
return woodUsageDataMap;
}
private Map<CompositeCountry, CarbonUsageData> getInitialCarbonUsageData() {
Map<CompositeCountry, CarbonUsageData> carbonUsageDataMap;
if (ModelConfig.IS_CALIBRATION_RUN) {
carbonUsageDataMap = new HashMap<CompositeCountry, CarbonUsageData>();
for (CompositeCountry cc : CountryManager.getInstance().getAllCompositeCountries()) {
CarbonUsageData cuData = new CarbonUsageData(0, 0, 0);
carbonUsageDataMap.put(cc, cuData);
}
} else {
carbonUsageDataMap = deserializeCarbonUsage();
}
return carbonUsageDataMap;
}
private ElasticDemandManager getElasticDemandManager(BaseConsumpManager baseConsumpManager, CalorieManager calorieManager) {
return (ModelConfig.IS_CALIBRATION_RUN) ?
new ElasticDemandManager(ModelConfig.SSP_SCENARIO, baseConsumpManager, calorieManager) :
deserializeElasticDemandManager(baseConsumpManager, calorieManager);
}
private void serializeElasticDemandManager() {
try {
if (demandManager instanceof ElasticDemandManager) {
String fileStr = ModelConfig.IS_CALIBRATION_RUN ? ModelConfig.SERIALIZED_DEMAND_MANAGER_FILE : ModelConfig.CHECKPOINT_DEMAND_MANAGER_FILE;
LogWriter.println("Starting serializing ElasticDemandManager to " + fileStr);
FileOutputStream fileOut = new FileOutputStream(fileStr);
ObjectOutputStream out = new ObjectOutputStream(fileOut);
out.writeObject(demandManager);
out.close();
fileOut.close();
LogWriter.println("Serialized data is saved");
}
else {
LogWriter.println("Not a ElasticDemandManager so not serializing");
}
} catch (IOException i) {
i.printStackTrace();
}
}
private ElasticDemandManager deserializeElasticDemandManager(BaseConsumpManager baseConsumpManager, CalorieManager calorieManager) {
String fileStr = ModelConfig.SERIALIZED_DEMAND_MANAGER_FILE;
try {
FileInputStream fileIn = new FileInputStream(fileStr);
ObjectInputStream in = new ObjectInputStream(fileIn);
ElasticDemandManager edm = (ElasticDemandManager) in.readObject();
edm.setup(ModelConfig.SSP_SCENARIO, baseConsumpManager, calorieManager);
in.close();
fileIn.close();
LogWriter.println("Deserialized " + fileStr);
return edm;
} catch (Exception i) {
LogWriter.printlnError("Problem deserializing " + fileStr);
LogWriter.print(i);
System.exit(-1);
return null;
}
}
@SuppressWarnings("unchecked")
private Map<CompositeCountry, Map<WoodType, WoodUsageData>> deserializeWoodUsage() {
try {
Map<CompositeCountry, Map<WoodType, WoodUsageData>> woodUsageDataMap;
FileInputStream fileIn = new FileInputStream(ModelConfig.SERIALIZED_WOOD_USAGE_FILE);
ObjectInputStream in = new ObjectInputStream(fileIn);
woodUsageDataMap = (Map<CompositeCountry, Map<WoodType, WoodUsageData>>) in.readObject();
in.close();
fileIn.close();
LogWriter.println("Deserialized " + ModelConfig.SERIALIZED_WOOD_USAGE_FILE);
return woodUsageDataMap;
} catch (IOException i) {
LogWriter.printlnError("Problem deserializing " + ModelConfig.SERIALIZED_WOOD_USAGE_FILE);
LogWriter.print(i);
return null;
} catch (ClassNotFoundException c) {
LogWriter.printlnError("Map<CompositeCountry, Map<WoodType, WoodUsageData>> not found");
c.printStackTrace();
return null;
}
}
@SuppressWarnings("unchecked")
private Map<CompositeCountry, CarbonUsageData> deserializeCarbonUsage() {
try {
Map<CompositeCountry, CarbonUsageData> carbonUsageDataMap;
FileInputStream fileIn = new FileInputStream(ModelConfig.SERIALIZED_CARBON_USAGE_FILE);
ObjectInputStream in = new ObjectInputStream(fileIn);
carbonUsageDataMap = (Map<CompositeCountry, CarbonUsageData>) in.readObject();
in.close();
fileIn.close();
LogWriter.println("Deserialized " + ModelConfig.SERIALIZED_CARBON_USAGE_FILE, 2);
return carbonUsageDataMap;
} catch (IOException i) {
LogWriter.printlnError("Problem deserializing " + ModelConfig.SERIALIZED_CARBON_USAGE_FILE);
LogWriter.print(i);
return null;
} catch (ClassNotFoundException c) {
LogWriter.printlnError("Map<CompositeCountry, CarbonUsageData> not found");
c.printStackTrace();
return null;
}
}
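/** Serialize a copy of the land use raster to the calibration or checkpoint file. */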
private void serializeLandUse(RasterSet<LandUseItem> landUseRaster) {
RasterSet<LandUseItem> rasterToSerialise = new RasterSet<LandUseItem>(desiredProjection);
String landUseFileStr = ModelConfig.IS_CALIBRATION_RUN ? ModelConfig.SERIALIZED_LAND_USE_FILE : ModelConfig.CHECKPOINT_LAND_USE_FILE;
for (Map.Entry<RasterKey, LandUseItem> entry : landUseRaster.entrySet()) {
LandUseItem newLuItem = new LandUseItem(entry.getValue());
rasterToSerialise.put(entry.getKey(), newLuItem);
}
LogWriter.println("Starting serializing LandUse to " + landUseFileStr);
FileOutputStream fileOut = new FileOutputStream(landUseFileStr);
ObjectOutputStream out = new ObjectOutputStream(fileOut);
out.close();
fileOut.close();
LogWriter.println("Serialized data is saved");
@SuppressWarnings("unchecked")
private RasterSet<LandUseItem> deserializeLandUse() {
try {
RasterSet<LandUseItem> initLU;
FileInputStream fileIn = new FileInputStream(ModelConfig.SERIALIZED_LAND_USE_FILE);
ObjectInputStream in = new ObjectInputStream(fileIn);
initLU = (RasterSet<LandUseItem>) in.readObject();
in.close();
fileIn.close();
LogWriter.println("Deserialized " + ModelConfig.SERIALIZED_LAND_USE_FILE);
LogWriter.printlnError("Problem deserializing " + ModelConfig.SERIALIZED_LAND_USE_FILE);
LogWriter.print(i);
return null;
LogWriter.printlnError("RasterSet<LandUseItem> class not found");
c.printStackTrace();
return null;
}
R0slyn
committed
@SuppressWarnings("unchecked")
private Map<CompositeCountry, Map<CropType, CropUsageData>> deserializeCropUsage() {
try {
Map<CompositeCountry, Map<CropType, CropUsageData>> initCropUsage;
FileInputStream fileIn = new FileInputStream(ModelConfig.SERIALIZED_CROP_USAGE_FILE);
ObjectInputStream in = new ObjectInputStream(fileIn);
initCropUsage = (Map<CompositeCountry, Map<CropType, CropUsageData>>) in.readObject();
in.close();
fileIn.close();
LogWriter.println("Deserialized " + ModelConfig.SERIALIZED_CROP_USAGE_FILE);
return initCropUsage;
} catch (IOException i) {
LogWriter.printlnError("Problem deserializing " + ModelConfig.SERIALIZED_CROP_USAGE_FILE);
LogWriter.print(i);
return null;
} catch (ClassNotFoundException c) {
LogWriter.printlnError("Map<CompositeCountry, Map<CropType, CropUsageData>> not found");
c.printStackTrace();
return null;
}
}
/** this is if we are starting from Hurtt or other initial land covers (so we don't have land uses and intensity data) */
private RasterSet<LandUseItem> getLandUseFromBaseline() {
RasterSet<LandCoverItem> initialLC = new RasterSet<LandCoverItem>(desiredProjection) {
private static final long serialVersionUID = 4642550777741425501L;
protected LandCoverItem createRasterData() {
return new LandCoverItem();
}
};
new MaxCropAreaReader(initialLC).getRasterDataFromFile(ModelConfig.HIGH_SLOPE_AREAS_FILE); // Fraction unavailable for conversion
new LandCoverReader(initialLC).getRasterDataFromFile(ModelConfig.INITAL_LAND_COVER_FILE); // Land cover fractions
new InitProtectedAreasReader(initialLC).getRasterDataFromFile(ModelConfig.PROTECTED_AREAS_FILE); // Protected fraction
new CropFractionReader(initialLC).getRasterDataFromFile(ModelConfig.CROP_FRACTIONS_FILE);
RasterSet<LandUseItem> landUseRaster = new RasterSet<LandUseItem>(initialLC.getHeaderDetails());
for (Map.Entry<RasterKey, LandCoverItem> entry : initialLC.entrySet()) {
//LogWriter.println(initialLC.getXCoordin(entry.getKey()) + " " + initialLC.getYCoordin(entry.getKey()));
landUseRaster.put(entry.getKey(), new LandUseItem(entry.getValue()));
}
return landUseRaster;
}
private YieldRaster getYieldSurfaces(Timestep timestep) {
return lpjYieldReader.getRasterData(timestep);
}
/** Get irrigation data that does not change with time, should only be called once */
private IrrigationRasterSet getFixedIrrigationData() {
IrrigationRasterSet fixedIrrigData = new IrrigationRasterSet(desiredProjection, new FPUManager(desiredProjection));
new IrrigiationCostReader(fixedIrrigData).getRasterDataFromFile(ModelConfig.IRRIGATION_COST_FILE);
new IrrigationConstraintReader(fixedIrrigData).getRasterDataFromFile(ModelConfig.IRRIGATION_CONSTRAINT_FILE);
String baseTimestepRootDir = Timestep.getYearSubDir(ModelConfig.YIELD_DIR, ModelConfig.ELLIOTT_BASEYEAR); // needs to be Elliott base timestep
new RunOffReader(fixedIrrigData, true).getRasterDataFromFile(baseTimestepRootDir + File.separator + ModelConfig.IRRIG_RUNOFF_FILE);
fixedIrrigData.calcIrrigConstraintOffsets(); // should have everything we need to calc offset between Elliott and LPJ data
return fixedIrrigData;
}
/** Get carbon flux data */
private void getCarbonFluxData(Timestep timestep) {
if (ModelConfig.IS_CARBON_ON) {
carbonFluxData = carbonFluxReader.getCarbonFluxes(globalLandUseRaster, timestep);
} else {
carbonFluxData = new CarbonFluxRasterSet(desiredProjection);
carbonFluxData.fillWithDefaults(globalLandUseRaster.keySet());
}
}
/** Get wood yield data */
private void getWoodYieldData(Timestep timestep) {
if (ModelConfig.IS_FORESTRY_ON) {
woodYieldData = woodYieldReader.getWoodYields(globalLandUseRaster, timestep, internationalMarket.getWoodPrices().get(WoodType.IND_ROUNDWOOD).getExportPrice());
} else {
woodYieldData = new WoodYieldRasterSet(desiredProjection);
}
}
/** Ugly in situ update of currentIrrigationData, better if IrrigationRasterSets were handled more immutably */
private void getUpdateIrrigationData(Timestep timestep) {
String rootDir = timestep.getYearSubDir(ModelConfig.YIELD_DIR);
IrrigationMaxAmountReader irrigMaxAmountReader = new IrrigationMaxAmountReader(currentIrrigationData, yieldSurfaces);
irrigMaxAmountReader.getRasterDataFromFile(rootDir + File.separator + ModelConfig.IRRIG_MAX_WATER_FILENAME);
if (!ModelConfig.USE_BLUE_WATER_FILE_IRRIG_CONSTRAINT) {
new RunOffReader(currentIrrigationData, false).getRasterDataFromFile(rootDir + File.separator + ModelConfig.IRRIG_RUNOFF_FILE);
currentIrrigationData.updateIrrigConstraints(timestep);
}
}
private void forceLandCoverChanges(Timestep timestep) {
String filePath = ModelConfig.FORCED_LCC_FILES_DIR + File.separator + "forcedLcc" + timestep.getYear() + ".txt";
File lccFile = new File(filePath);
if (lccFile.exists()) {
RasterSet<ForcedLccItem> forcedLccRaster = new RasterSet<ForcedLccItem>(desiredProjection) {
protected ForcedLccItem createRasterData() {
return new ForcedLccItem();
}
};
LandCoverChangeReader lccReader = new LandCoverChangeReader(forcedLccRaster);
lccReader.getRasterDataFromFile(filePath);
countryAgents.forceLandCoverChangesForAll(forcedLccRaster);
}
}

private void serializeCheckpoint() {

serializeLandUse(globalLandUseRaster);
countryAgents.serializeCropUsageForAll();
countryAgents.serializeWoodUsageForAll();
countryAgents.serializeCarbonUsageForAll();

internationalMarket.serializeGlobalPrices();
serializeElasticDemandManager();
saveConfig();
}
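/** Write the double-valued ModelConfig fields to the calibration config file. */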
private void saveConfig() {
try {
FileWriter fstream = new FileWriter(ModelConfig.CALIB_CONFIG_FILE);
BufferedWriter outputFile = new BufferedWriter(fstream);
for (Field field : ModelConfig.class.getDeclaredFields()) {
String parameter = field.getName();
Class<?> paramType = field.getType();
if (paramType.getName().equals("double")) {
double value = field.getDouble(null); // static config field, so no instance argument is needed
outputFile.write(String.format("%s=%s", parameter, value));
outputFile.newLine();
}
}
outputFile.close();
} catch (IOException i) {
i.printStackTrace();
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
}

}
// Checks for data coverage. Fills missing data with defaults. Currently this is zeroes but plan to use
// nearest neighbour interpolation eventually.
private void handleMissingData() {
int totalCount = 0;
int coverage = 0;
for (RasterKey key : globalLandUseRaster.keySet()) {
totalCount ++;
boolean hasCropYields = yieldSurfaces.containsKey(key);
boolean hasIrrig = currentIrrigationData.containsKey(key);
boolean hasWoodYields = !ModelConfig.IS_FORESTRY_ON || woodYieldData.containsKey(key);
boolean hasCarbonFluxes = !ModelConfig.IS_CARBON_ON || carbonFluxData.containsKey(key);
boolean isComplete = (hasCropYields && hasIrrig && hasWoodYields && hasCarbonFluxes);
if (isComplete) {
coverage++;
}
if (!hasCropYields)
yieldSurfaces.put(key, YieldResponsesItem.getDefault());
if (!hasIrrig)
currentIrrigationData.put(key, IrrigationItem.getDefault());
if (!hasWoodYields)
woodYieldData.put(key, WoodYieldItem.getDefault());
if (!hasCarbonFluxes)
carbonFluxData.put(key, CarbonFluxItem.getDefault());
}
double coveragePc = (double) coverage / totalCount * 100;
LogWriter.println(String.format("Data coverage: %.2f%%", coveragePc), 1);
}
}