diff --git a/.gitignore b/.gitignore
index 89d48e72d06a70407edea87d3cbe7629ca2dc958..53efb632b6fab50c4cadb362bc6958c3202c1df1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,5 @@ inst/doc
 .RDataTmp2
 .RDataTmp3
 test
+libarchive-3.5.2
+libarchive-3.5.2.tar.gz
diff --git a/data/.gitignore b/data/.gitignore
deleted file mode 100644
index b1e54437876c9b8433e283a46992e87c816bf1b5..0000000000000000000000000000000000000000
--- a/data/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-PARCELLE.rda
-meteo.rda
diff --git a/data/PARCELLE.rda b/data/PARCELLE.rda
index 96997417e8f6863f471a223f80e44bd4cdee0dec..11f32b71f2d93d30f7ff210e828e9096979ef780 100644
Binary files a/data/PARCELLE.rda and b/data/PARCELLE.rda differ
diff --git a/data/configCC.rda b/data/configCC.rda
new file mode 100644
index 0000000000000000000000000000000000000000..da8d7434ff44c8feba46f4e687d31cb77ba0667f
Binary files /dev/null and b/data/configCC.rda differ
diff --git a/work_in_progress/Carrying capacity.R b/work_in_progress/Carrying capacity.R
new file mode 100644
index 0000000000000000000000000000000000000000..e5210c3ba5e95c33f770cb0c2eaf538af0f7b33a
--- /dev/null
+++ b/work_in_progress/Carrying capacity.R	
@@ -0,0 +1,304 @@
+### Date of change: 13/04
+
+rm(list = ls(all.names = TRUE)) # clear all objects, including hidden ones
+gc() # free up memory and report memory usage
+
+devtools::install(quick = T)
+
+how <- "EWY"
+# how <- "EWY_migale"
+# how <- "PACHKA"
+#how <- "PACHKA_migale"
+
+if(how == "PACHKA_migale")
+setwd("/save_home/phammami/dengue-risk-assessment")
+### Carrying capacity
+
+### Load packages
+library(terra)
+library(tidyterra)
+library(magrittr)
+library(data.table)
+library(ggplot2)
+# library(sf)
+# library(dplyr)
+# library(rgeos)
+# library(raster) # To read POP
+
+####
+### URBAN ATLAS
+####
+
+## IRIS GE
+if(how == "EWY")
+  filename <- "test/data/IRIS-GE/IRIS_GE.SHP" else
+  filename <- "work_in_progress/data/IRIS-GE/IRIS_GE.SHP"
+PARCELLES <- vect(filename)
+
+  # PARCELLES_df <- PARCELLES %>% as.data.frame
+  # PARCELLES %<>% .[startsWith(PARCELLES$CODE_IRIS, "34"),] # Choose dpt 34
+  ## Choose Montpellier
+  # PARCELLES <- PARCELLES[PARCELLES$NOM_COM == "Montpellier",]
+
+  ## Load the correspondence table 'configKL_OCS_UA_new.csv'
+
+  if(how ==  "EWY")
+    configKL <- read.csv2("work_in_progress/data/configKL/Montpellier/TRAVAIL/CSV/KL/configKL_OCS_UA_new.csv") else
+      configKL <- read.csv("work_in_progress/data/CC/configKL_OCS_UA_new.csv",
+                           sep = ";",
+                           header=T,
+                           stringsAsFactors=FALSE,
+                           fileEncoding="latin1")
+
+  names(configKL)[3:4] <- c("nbL_ha", "p_gite_anthro")
+
+  files_done <- c()
+
+  if(how == "PACHKA_migale")
+    list_files <- list.files("/work_home/phammami/dengue-risk-assessment/work_in_progress/data/Urban_Atlas_2018") else
+    list_files <- list.files("work_in_progress/data/Urban_Atlas_2018")
+
+list_files %<>% .[!grepl("\\.zip$", .)] # keep only the extracted folders, drop the .zip archives
+# path <- list_files
+
+start_time <- Sys.time()
+for(path in list_files){
+
+
+  print(path)
+  print(Sys.time())
+
+  ## Load Urban Atlas shapefile
+
+  if(how == "PACHKA_migale")
+    path_base <- "/work_home/phammami/dengue-risk-assessment/work_in_progress/data/Urban_Atlas_2018/" else
+      path_base <- "work_in_progress/data/Urban_Atlas_2018/"
+
+  URBAN_ATLAS <- vect(paste0(
+    path_base,
+    path,
+    "/Data/",
+    path,
+    ".gpkg"
+  ), layer = gsub("_v013", "", path))
+
+  message("urban atlas loaded")
+  ## cut polygons that would cross IRIS-GE boundaries
+  ###### tests
+  ### Firstly, transform URBAN_ATLAS into the same CRS as PARCELLES
+  # crs(URBAN_ATLAS, proj = TRUE)
+  # cat(crs(URBAN_ATLAS), "\n")
+  URBAN_ATLAS <- project(URBAN_ATLAS, PARCELLES)
+
+  ## Identify overlapping areas and cut URBAN_ATLAS polygons that cross the PARCELLES boundaries
+  y <- ext(URBAN_ATLAS)
+  y <- crop(PARCELLES, y)
+  message("Croping urban atlas is done")
+
+  ## Process the parcels of this tile in batches of 20 to limit memory use
+  d <- seq(nrow(y))
+  d <- split(d, ceiling(seq_along(d)/20)) # e.g. 45 parcels -> batches 1:20, 21:40, 41:45
+
+  runCC <- FALSE
+
+  ## Rerun the intersection only if most parcels in this tile still lack carrying-capacity values
+  if("CC.Rda" %in% list.files("work_in_progress/data")){
+    load(file = "work_in_progress/data/CC.Rda")
+    if(table(PARCELLES_df[CODE_IRIS %in% y$CODE_IRIS,is.na(sum_areas)])[["TRUE"]] %>%
+       is_greater_than(table(PARCELLES_df[CODE_IRIS %in% y$CODE_IRIS,is.na(sum_areas)])[["FALSE"]]))
+      runCC <- TRUE
+  } else runCC <- TRUE
+
+  if(runCC)
+    for(i in seq(length(d))){
+
+      PATCH <- intersect(y[d[[i]],], URBAN_ATLAS)
+      if(nrow(PATCH)>0){
+        #plot(URBAN_ATLAS, col=rainbow(4))
+        message("Intersection is done")
+
+        # area in hectare
+        PATCH$area_ha <- expanse(PATCH, unit = "ha")
+
+        ## Merge the Urban Atlas attributes with the configKL_OCS_UA_new table (by.x = "CODE", by.y = "code_2018")
+        carrying_capacity <-
+          merge(configKL, PATCH, by.x = "CODE", by.y = "code_2018")
+
+        # drop the unused columns
+        carrying_capacity <-
+          subset(
+            carrying_capacity,
+            select = -c(fua_code, comment, country, class_2018, identifier, prod_date)
+          )
+
+        ## New column Kltot_area = nbL_ha * area_ha (check the area unit and convert to hectares if needed)
+        carrying_capacity$Kltot_area <-
+          carrying_capacity$nbL_ha * carrying_capacity$area_ha
+
+        ## Klfix = Kltot_area * proportion of anthropogenic larval sites
+        carrying_capacity$p_gite_anthro %<>% as.numeric # convert to numeric
+        carrying_capacity$Klfix <-
+          carrying_capacity$Kltot_area * carrying_capacity$p_gite_anthro
+        ## Klvar = Kltot_area * (1 - proportion of anthropogenic larval sites)
+        carrying_capacity$Klvar <-
+          carrying_capacity$Kltot_area * (1 - carrying_capacity$p_gite_anthro)
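+
+        ## Added sanity check (a minimal sketch, not part of the original workflow): by construction
+        ## Klfix + Klvar should sum back to Kltot_area, up to floating-point error
+        if(any(abs(carrying_capacity$Klfix + carrying_capacity$Klvar -
+                   carrying_capacity$Kltot_area) > 1e-6, na.rm = TRUE))
+          warning("Klfix + Klvar does not match Kltot_area for some polygons")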
+
+        ## Summarise per PARCELLE/CODE_IRIS (sums of Klfix, Klvar, area_ha and Pop2018)
+        # PARCELLES
+        setDT(carrying_capacity)
+        carrying_capacity[, sum_areas := sum(area_ha), by = CODE_IRIS]
+        # carrying_capacity[, c("CODE_IRIS", "test_area")] %>% unique
+        carrying_capacity[, Kfix := sum(Klfix), by = CODE_IRIS]
+        carrying_capacity[, Kvar := sum(Klvar), by = CODE_IRIS]
+        carrying_capacity[, Pop2018 := sum(Pop2018), by = CODE_IRIS]
+
+
+        if ("CC.Rda" %in% list.files("work_in_progress/data"))
+          load(file = "work_in_progress/data/CC.Rda") else
+            PARCELLES_df <- as.data.table(PARCELLES)
+
+        carrying_capacity %<>% .[, c("CODE_IRIS", "Kfix", "Kvar", "sum_areas", "Pop2018"), with = F] %>% unique
+
+        carrying_capacity %<>% .[order(CODE_IRIS), ]
+        PARCELLES_df %<>% .[order(CODE_IRIS), ]
+
+        PARCELLES_df[CODE_IRIS %in% carrying_capacity$CODE_IRIS, "Kfix"] <-
+          carrying_capacity$Kfix
+        PARCELLES_df[CODE_IRIS %in% carrying_capacity$CODE_IRIS, "Kvar"] <-
+          carrying_capacity$Kvar
+        PARCELLES_df[CODE_IRIS %in% carrying_capacity$CODE_IRIS, "sum_areas"] <-
+          carrying_capacity$sum_areas
+        PARCELLES_df[CODE_IRIS %in% carrying_capacity$CODE_IRIS, "Pop2018"] <-
+          carrying_capacity$Pop2018
+
+        save(PARCELLES_df, file = "work_in_progress/data/CC.Rda")
+
+        rm(carrying_capacity)
+        rm(PARCELLES_df)
+        rm(PATCH)
+
+        message(paste0(i, "/", length(d)))
+      }}
+
+  rm(URBAN_ATLAS)
+  rm(y)
+}
+end_time <- Sys.time()
+end_time - start_time
+
+
+#######
+####### Explore
+#######
+
+
+if(how == "EWY")
+load(file = "/dengue-risk-assessment/work_in_progress/data/CC.Rda")
+if(how == "EWY_migale")
+load("/work_home/eortega/dengue-risk-assessment/work_in_progress/data/CC.Rda")
+
+# number of parcelles with CC values
+PARCELLES_df[!is.na(Kfix),] %>% nrow
+
+# number of parcelles without values
+PARCELLES_df[is.na(Kfix),] %>% nrow
+
+PARCELLES$area_ha <-  terra::expanse(PARCELLES, unit = "ha")
+
+PARCELLES_df[match(PARCELLES$CODE_IRIS, PARCELLES_df$CODE_IRIS), area_ha := PARCELLES$area_ha]
+
+plot(PARCELLES_df[!is.na(sum_areas), area_ha] ~ PARCELLES_df[!is.na(sum_areas), sum_areas])
+
+PARCELLES_df[!is.na(sum_areas) & round(sum_areas,3) < round(area_ha,3), abs(sum_areas - area_ha)] %>% summary()
+PARCELLES_df[!is.na(sum_areas) & round(sum_areas,3) < round(area_ha,3), abs(sum_areas - area_ha)] %>%
+  data.frame(diff_surf = .) %>%
+  ggplot(., aes(x=diff_surf)) + geom_histogram(binwidth=.5)
+
+PARCELLES_df[!is.na(sum_areas) & round(sum_areas,3) < round(area_ha,3),]
+
+# Parcels where the Urban Atlas evaluation covers only part of the total surface (gap > 2.5% of the area)
+PARCELLES_df[!is.na(sum_areas) & round(sum_areas,3) < round(area_ha,3) & abs(sum_areas - area_ha) > 0.025*area_ha,]
+
+
+#######
+####### Work on BD TOPO to complete dataset
+#######
+
+if(how == "EWY")
+  load(file = "C:/Users/ORTEGA/Documents/dengue-risk-assessment/work_in_progress/data/CC.Rda")
+if(how == "EWY_migale")
+  load("/work_home/eortega/dengue-risk-assessment/work_in_progress/data/CC.Rda")
+
+  # Rename CODE_IRIS in PARCELLES_df to ID
+  colnames(PARCELLES_df)[colnames(PARCELLES_df) == "CODE_IRIS"] <- "ID"
+
+  ArboRisk::PARCELLE
+  merged_PARCELLES <- merge(PARCELLE[, -c(5:8)], PARCELLES_df[, c("ID",  "Kfix" ,    "Kvar" , "sum_areas")], by = "ID") ##Empty?
+
+  #PARCELLES_df <- merge(PARCELLE[, -c(5:8)], PARCELLES_df[, c("ID",  "Kfix" ,    "Kvar" , "sum_areas")],
+  #                      by = "ID")
+
+  ####
+  ### Without URBAN ATLAS
+  ####
+
+  ### Load shapefiles
+
+  #### Administrative unit (patch)
+
+  ## IRIS GE
+  filename <- "test/data/IRIS-GE/IRIS_GE.SHP"
+  PARCELLES <- vect(filename)
+  # Choose dpt 34
+  PARCELLES %<>% .[startsWith(PARCELLES$CODE_IRIS, "34"),]
+  PARCELLES$SURF_HA <- expanse(PARCELLES, unit = "ha")
+
+  #### TOPO Building
+  filename <- "test/data/TOPO/BATIMENT.shp" ## Files only for dpt 34
+  BUILDING <- vect(filename)
+
+  # Select the type of buildings to consider
+  # BUILDING_select <- BUILDING[BUILDING$USAGE1 ==  "Résidentiel",]
+
+  ### Count the buildings in each polygon
+
+# test <- relate(BUILDING_select, PARCELLES[1:10,], "within")
+# PARCELLES[1:10,"nBATIMENTS_within"] <- test %>% colSums()
+#
+# test <- relate(BUILDING_select, PARCELLES[1:10,], "Overlaps")
+# PARCELLES[1:10,"nBATIMENTS_Overlaps"] <- test %>% colSums()
+#
+# test <- relate(BUILDING_select, PARCELLES[1:10,], "crosses")
+# PARCELLES[1:10,"nBATIMENTS_crosses"] <- test %>% colSums()
+#
+# test <- relate(BUILDING_select, PARCELLES[1:10,], "intersects")
+# PARCELLES[1:10,"nBATIMENTS_intersects"] <- test %>% colSums()
+#
+# test <- relate(BUILDING_select, PARCELLES[1:10,], "touches")
+# PARCELLES[1:10,"nBATIMENTS_touches"] <- test %>% colSums()
+
+# Count the number of buildings in each polygon: relate() returns a logical
+# buildings x parcels matrix, so colSums() gives the number of intersecting buildings per parcel
+count_bati <- relate(BUILDING, PARCELLES, "intersects")
+PARCELLES[, "nBATIMENTS"] <- count_bati %>% colSums()
+
+## Add carrying capacity data
+# Multiply the number of buildings by the number of larval sites per building (assumed here: 4)
+PARCELLES[, "nGites"] <- PARCELLES$nBATIMENTS * 4
+
+# Multiply the number of larval sites by the number of larvae per site (assumed here: 10)
+PARCELLES[, "nLarves"] <- PARCELLES$nGites * 10
+
+## Number of larvae per hectare
+PARCELLES[, "nLarves_ha"] <- PARCELLES$nLarves / PARCELLES$SURF_HA
+
+## Compare with the test dataset
+PARCELLES[, "CC"] <- PARCELLES$KLfix + PARCELLES$KLvar
+PARCELLES[, "diff_CC"] <- PARCELLES$CC - PARCELLES$nLarves
+
+PARCELLES %>% as.data.frame
+summary(PARCELLES$nLarves_ha)
+
+plot(PARCELLES, "diff_CC", type = "continuous")
+
+## Save PARCELLES
+save(PARCELLES, file = "work_in_progress/data/CC_TOPO.Rda")
diff --git "a/work_in_progress/script t\303\251l\303\251chargement BD TOPO.R" "b/work_in_progress/script t\303\251l\303\251chargement BD TOPO.R"
new file mode 100644
index 0000000000000000000000000000000000000000..98c56b578b8b44bacf6ff95e08a02d0668345ed2
--- /dev/null
+++ "b/work_in_progress/script t\303\251l\303\251chargement BD TOPO.R"	
@@ -0,0 +1,389 @@
+### Date of change: 13/04
+
+rm(list = ls(all.names = TRUE)) # clear all objects, including hidden ones
+gc() # free up memory and report memory usage
+
+### Load packages
+library(terra)
+library(tidyterra)
+library(magrittr)
+library(data.table)
+library(ggplot2)
+library(stringr)
+library(readr)
+
+how <- "EWY"
+#how <- "PACHKA"
+where <- "migale"
+#where <- "computer"
+
+if(how == "EWY" & where == "migale")
+path2proj <- '/work_home/eortega/'
+
+if(how == "PACHKA" & where == "migale")
+  path2proj <- '/work_home/phammami/'
+
+if(where == "computer")
+  path2proj <- ''
+
+options(timeout = max(90000, getOption("timeout")))
+
+############ CLC
+
+if(how ==  "EWY")
+  configKL <- read.csv2(paste0(path2proj , "work_in_progress/data/configKL/Montpellier/TRAVAIL/CSV/KL/configKL_OCS_UA_new.csv")) else
+    configKL <- read.csv(paste0(path2proj , "work_in_progress/data/CC/CLC_CC.csv"),
+                         sep = ";",
+                         header=T,
+                         stringsAsFactors=FALSE,
+                         fileEncoding="latin1")
+
+names(configKL)[1:4] <- c("CODE", "Label", "nbL_ha", "p_gite_anthro")
+setDT(configKL)
+configKL %<>% unique
+configKL[, CODE := str_sub(CODE, 1, 3)] # keep the level-3 CLC code (first three characters)
+configKL[, Label := NULL]
+configKL %<>% .[, lapply(.SD, function(x) mean(x, na.rm =T)), by=CODE] # average nbL_ha and p_gite_anthro per CLC code
+
+configKL %<>% unique
+
+  filename <- paste0(path2proj , "work_in_progress/data/CLC/CLC12_FR_RGF_SHP/CLC12_FR_RGF.shp")
+
+  reg_poly <- vect(filename)
+
+  names(reg_poly)[names(reg_poly) == "ID"] <- "ID_CLC"
+
+  SpatVec <- "data/SpatVec.shp" %>% terra::vect(.)
+
+  reg_poly_PATCH <- intersect(SpatVec, reg_poly)
+
+  # area in hectare
+  reg_poly_PATCH$area_ha <- expanse(reg_poly_PATCH, unit = "ha")
+
+  ## Merge the CLC attributes with the configKL table (by.x = "CODE", by.y = "CODE_12")
+  carrying_capacity <-
+    merge(configKL, reg_poly_PATCH %>% as.data.frame, by.x = "CODE", by.y = "CODE_12", all.y =T)
+
+  # Keep only the columns of interest (Klfix and Klvar are computed below)
+  carrying_capacity %<>%
+    subset(
+      .,
+      select = c(ID, AREA_HA, area_ha, nbL_ha, p_gite_anthro)
+    ) %>% unique
+
+  ## New column Kltot_area = nbL_ha * area_ha (check the area unit and convert to hectares if needed)
+  carrying_capacity[, Kltot_area := round(nbL_ha * area_ha)]
+
+  ## Klfix = Kltot_area * proportion of anthropogenic larval sites
+  carrying_capacity[, Klfix := round(Kltot_area * p_gite_anthro)]
+  ## Klvar = Kltot_area * (1 - proportion of anthropogenic larval sites)
+  carrying_capacity[, Klvar := round(Kltot_area * (1-p_gite_anthro))]
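+
+  ## Added check (a minimal sketch, not part of the original workflow): p_gite_anthro is a
+  ## proportion, so values outside [0, 1] would signal a problem in the configKL input
+  if(any(carrying_capacity$p_gite_anthro < 0 | carrying_capacity$p_gite_anthro > 1, na.rm = TRUE))
+    warning("p_gite_anthro outside [0, 1]; check the configKL input")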
+
+  ## Summarise per PARCELLE/ID (sums of Klfix, Klvar and area_ha)
+  # PARCELLES
+  carrying_capacity[, sum_areas := sum(area_ha), by = ID]
+  # carrying_capacity[, c("CODE_IRIS", "test_area")] %>% unique
+  carrying_capacity[, Kfix := sum(Klfix), by = ID]
+  carrying_capacity[, Kvar := sum(Klvar), by = ID]
+
+
+  if ("CC.Rda" %in% list.files("work_in_progress/shared_data"))
+    load(file = "work_in_progress/shared_data/CC.Rda") else
+      PARCELLES_df <- as.data.table(PARCELLES)
+
+  carrying_capacity %<>% .[, c("ID", "AREA_HA", "nbL_ha", "p_gite_anthro", "Kfix", "Kvar", "sum_areas"), with = F] %>% unique
+
+  carrying_capacity %<>% .[order(ID), ]
+
+  PARCELLES_df[match(carrying_capacity$ID, CODE_IRIS),Kfix_CLC := carrying_capacity$Kfix]
+  PARCELLES_df[match(carrying_capacity$ID, CODE_IRIS), Kvar_CLC := carrying_capacity$Kvar]
+  PARCELLES_df[match(carrying_capacity$ID, CODE_IRIS), sum_areas_CLC := carrying_capacity$sum_areas]
+
+  save(PARCELLES_df, file = "work_in_progress/shared_data/CC.Rda")
+
+##########
+### Download data
+##########
+listdpt <- c(str_sub(paste0("00", c(1:96)), -3), "02A", "02B") ### paste0("97", 1:8): overseas departments (DOM) are skipped for now
+
+# Ewy -- careful, 976 appears twice in the list below
+# 001","002","003","004","005", "006","007","008","009","010","011","012","013","014","015","016","017","018","019", "021","022","023","024","025","026","027","028","029","030","031","032","033","034","035","036","037","038","039","040","041","042","043",
+#              "044","045","046","047","048","049","050","051","052","053","054","055","056","057","058","059","060","061","062","063","064","065","066",
+#              "067","068","069","070","071","072","073","074","075","076","077","078","079","080","081","082","083","084","085","086","087","088","089",
+#              "090","091","092","093","094","095","096", "971", "972","973","974", "975", "976", "976", "977", "978", "02A", "02B")
+
+
+#### Download the BD TOPO datasets from IGN
+### Download for each dpt
+for(dpt in listdpt){
+  url <- paste0("https://wxs.ign.fr/859x8t863h6a09o9o6fy4v60/telechargement/prepackage/BDTOPOV3-TOUSTHEMES-DEPARTEMENT-PACK_231$BDTOPO_3-3_TOUSTHEMES_SHP_LAMB93_D",
+                dpt,
+                "_2023-03-15/file/BDTOPO_3-3_TOUSTHEMES_SHP_LAMB93_D",
+                dpt,
+                "_2023-03-15.7z")
+
+  filename <- paste0("BDTOPO_3-3_TOUSTHEMES_SHP_LAMB93_D", dpt, "_2023-03-15.7z")
+
+  if(!filename %in% list.files("/work_home/eortega/dengue-risk-assessment/work_in_progress/data/BD_TOPO/"))
+    download.file(url,
+                  destfile = paste0("/work_home/eortega/dengue-risk-assessment/work_in_progress/data/BD_TOPO/",filename),
+                  mode="wb")
+
+  print(paste0("Downloaded ", filename))
+}
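+
+## Optional alternative sketch (not part of the original workflow): wrapping the download in
+## tryCatch so that one failing department does not abort the whole loop; the same URL pattern
+## and destination directory as above are assumed.
+# dest_dir <- "/work_home/eortega/dengue-risk-assessment/work_in_progress/data/BD_TOPO/"
+# for(dpt in listdpt){
+#   filename <- paste0("BDTOPO_3-3_TOUSTHEMES_SHP_LAMB93_D", dpt, "_2023-03-15.7z")
+#   if(filename %in% list.files(dest_dir)) next
+#   url <- paste0("https://wxs.ign.fr/859x8t863h6a09o9o6fy4v60/telechargement/prepackage/",
+#                 "BDTOPOV3-TOUSTHEMES-DEPARTEMENT-PACK_231$BDTOPO_3-3_TOUSTHEMES_SHP_LAMB93_D",
+#                 dpt, "_2023-03-15/file/", filename)
+#   tryCatch(download.file(url, destfile = paste0(dest_dir, filename), mode = "wb"),
+#            error = function(e) message("Download failed for ", dpt, ": ", conditionMessage(e)))
+# }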
+
+### Extract and analyse the BD TOPO shapefiles for each department
+for(dpt in listdpt){
+
+  f <- system.file("data/SpatVec.shp", package="ArboRisk")
+  PARCELLES <- terra::vect(f)
+
+  # Choose dpt
+  PARCELLES %<>% .[startsWith(PARCELLES$ID, stringr::str_sub(dpt, start= -2)),]
+  PARCELLES$SURF_HA <- expanse(PARCELLES, unit = "ha")
+
+  output_dir <- paste0("/work_home/eortega/dengue-risk-assessment/work_in_progress/data/BD_TOPO/unpacked")
+  archive_file <- paste0("/work_home/eortega/dengue-risk-assessment/work_in_progress/data/BD_TOPO/BDTOPO_3-3_TOUSTHEMES_SHP_LAMB93_D", dpt, "_2023-03-15.7z")
+
+
+  system_command <- paste0("7z e -o",
+                         output_dir,
+                         " ",
+                         archive_file)
+  if(where == "migale")
+    system(system_command) else # 7z is available on the command line on migale
+      archive::archive_extract(archive_file, output_dir) # otherwise use the archive package
+
+
+  ##########
+  ### Add number of buildings
+  ##########
+
+### Open building shapefile
+filename <- paste0("/work_home/eortega/dengue-risk-assessment/work_in_progress/data/BD_TOPO/unpacked")
+
+BUILDING <- vect(filename, layer = "BATIMENT")
+
+## Delete the unzipped files once done
+for(inside_files in list.files(filename))
+  file.remove(paste0(filename, "/",inside_files))
+
+file.remove(filename)
+
+# Count the number of buildings in each polygon: relate() returns a logical
+# buildings x parcels matrix, so colSums() gives the number of intersecting buildings per parcel
+count_bati <- relate(BUILDING, PARCELLES, "intersects")
+PARCELLES[, "nBATIMENTS"] <- count_bati %>% colSums()
+
+### Open CC.Rda file
+load("~/work/dengue-risk-assessment/work_in_progress/shared_data/CC.Rda")
+
+## Add to the CC.Rda file
+PARCELLES %<>% as.data.frame # convert PARCELLES to a data.frame to match PARCELLES_df
+PARCELLES %<>% .[, c("ID", "nBATIMENTS")] # SpatVec uses ID (not CODE_IRIS) as its key column
+PARCELLES_df[match(PARCELLES$ID, CODE_IRIS), nBATIMENTS := PARCELLES$nBATIMENTS]
+
+## Save
+save(PARCELLES_df, file = "~/work/dengue-risk-assessment/work_in_progress/shared_data/CC.Rda")
+
+# Delete count_bati, PARCELLES_df and BUILDING
+rm(count_bati)
+rm(PARCELLES_df)
+rm(BUILDING)
+gc()
+}
+
+## Check outputs
+
+f <- system.file("data/SpatVec.shp", package="ArboRisk")
+SpatVec <- terra::vect(f)
+
+load("work_in_progress/shared_data/CC.Rda")
+setDT(PARCELLES_df)
+PARCELLES_NA <- PARCELLES_df[is.na(nBATIMENTS), ID]
+
+SpatVec[SpatVec$ID %in% PARCELLES_NA]
+SpatVec[SpatVec$ID %in% PARCELLES_NA] %>% plot
+
+
+##########
+### Estimate carrying capacity based on number of buildings
+##########
+### Open files
+load("~/work/dengue-risk-assessment/work_in_progress/shared_data/CC.Rda")
+load("work_in_progress/shared_data/CC.Rda")
+ArboRisk::PARCELLE
+
+## Choose the right unit
+PARCELLE$SURF_HA
+
+## Add carrying capacity data
+# Multiply the number of buildings by the number of larval sites per building (assumed here: 4)
+PARCELLES_df[, "nGites"] <- PARCELLES_df$nBATIMENTS * 4
+
+# Multiply the number of larval sites by the number of larvae per site (assumed here: 10)
+PARCELLES_df[, "nLarves"] <- PARCELLES_df$nGites * 10
+
+## Number of larvae per hectare = ktot
+# (this relies on PARCELLES_df and PARCELLE being in the same row order)
+PARCELLES_df[, "nLarves_ha"] <- PARCELLES_df$nLarves / PARCELLE$SURF_HA
+
+# ## Kfix = Number of larvae per hectare * number of larval site
+# Kfix_TP <- PARCELLES_df[, "nLarves_ha"] * PARCELLES_df[, "nGites"]
+#
+# ## Kvar = Number of larvae per hectare * (1- number of larval site)
+# kvar_TP <- PARCELLES_df[, "nLarves_ha"] * (1 - PARCELLES_df[, "nGites"])
+
+## Round the values
+PARCELLES_df[,Kfix_UA := round(Kfix)]
+PARCELLES_df[,Kvar_UA := round(Kvar)]
+
+PARCELLES_df[,Kfix_TP := round(Kfix_TP)]
+PARCELLES_df[,Kvar_TP := round(Kvar_TP)]
+
+PARCELLES_df[, `:=`(nBATIMENTS = NULL,
+                    Kfix = NULL,
+                    Kvar = NULL,
+                    NOM_COM = NULL,
+                    IRIS = NULL,
+                    INSEE_COM = NULL,
+                    NOM_IRIS = NULL,
+                    TYP_IRIS = NULL)]
+
+
+# Rename CODE_IRIS in PARCELLES_df to ID
+colnames(PARCELLES_df)[colnames(PARCELLES_df) == "CODE_IRIS"] <- "ID"
+
+# Merge with PARCELLES from data(PARCELLE) by ID
+merged_PARCELLES <- merge(PARCELLE, PARCELLES_df, by = "ID")
+
+merged_PARCELLES[, `:=`(KLfix = NULL,
+                        KLvar = NULL,
+                        KPvar = NULL,
+                        KPfix = NULL,
+                        NOM_COM = NULL)]
+
+# Compare Kfix/Kvar without urbanization data vs Urban Atlas (UA) vs BD TOPO (TP)
+merged_PARCELLES[, "CC_UA"] <- merged_PARCELLES$Kfix_UA + merged_PARCELLES$Kvar_UA
+merged_PARCELLES[, "diff_CC"] <- merged_PARCELLES$CC_UA - PARCELLES_df$nLarves_ha
+
+merged_PARCELLES[, "CC_TP"] <- merged_PARCELLES$Kfix_TP + merged_PARCELLES$Kvar_TP
+
+merged_PARCELLES$CC_TP[1:20]
+merged_PARCELLES$CC_UA[1:20]
+
+## Indices where CC_UA is not NA
+which(!is.na(merged_PARCELLES$CC_UA))
+## and the corresponding values
+merged_PARCELLES$CC_UA[which(!is.na(merged_PARCELLES$CC_UA))]
+
+## Count how many CC_TP values are NA
+merged_PARCELLES[is.na(CC_TP), .N]
+
+### Plot the carrying-capacity estimates against surface area
+data <- data.frame(kfix_UA = merged_PARCELLES$Kfix_UA,
+                   Kvar_UA = merged_PARCELLES$Kvar_UA,
+                   kfix_TP = merged_PARCELLES$Kfix_TP,
+                   Kvar_TP = merged_PARCELLES$Kvar_TP,
+                   SURF_HA = merged_PARCELLES$SURF_HA)
+
+ggplot(data = data, aes(x = kfix_UA, y = SURF_HA)) +
+  geom_point(color = "blue")
+ggplot(data = data, aes(x = kfix_TP, y = SURF_HA)) +
+  geom_point(color = "blue")
+
+
+# Compare Pop2018 with POP from PARCELLE (data(PARCELLE))
+merged_PARCELLES$Pop2018[1:20]
+merged_PARCELLES$POP[1:20]
+
+##########
+### Add CC.Rda from CLC to PARCELLE
+##########
+### Open files
+load("work_in_progress/shared_data/CC.Rda")
+data(PARCELLE)
+
+## Rename Kfix, Kvar and sum_areas to their _UA versions
+setnames(PARCELLES_df, "Kfix", "Kfix_UA")
+setnames(PARCELLES_df, "Kvar", "Kvar_UA")
+setnames(PARCELLES_df, "sum_areas", "sum_areas_UA")
+
+## Round the values
+PARCELLES_df$Kfix_UA <- round(PARCELLES_df$Kfix_UA)
+PARCELLES_df$Kvar_UA <- round(PARCELLES_df$Kvar_UA)
+
+PARCELLES_df[, `:=`(nBATIMENTS = NULL,
+                    NOM_COM = NULL,
+                    IRIS = NULL,
+                    INSEE_COM = NULL,
+                    NOM_IRIS = NULL,
+                    TYP_IRIS = NULL)]
+
+# Rename CODE_IRIS in PARCELLES_df to ID
+setnames(PARCELLES_df, "CODE_IRIS", "ID")
+
+# number of parcelles with CC_UA values
+PARCELLES_df[!is.na(Kfix_UA),] %>% nrow
+
+# Merge with PARCELLES from data(PARCELLE) by ID
+merged_PARCELLES <- merge(PARCELLE, PARCELLES_df, by = "ID")
+
+merged_PARCELLES[, `:=`(KLfix = NULL,
+                    KLvar = NULL,
+                    KPfix = NULL,
+                    KPvar = NULL,
+                    POP = NULL)]
+
+### Keep K_UA; fall back on K_CLC where it is NA or the UA coverage is incomplete. Keep POP_INSEE; fall back on Pop2018 where it is NA.
+## Load population 2019 from https://www.insee.fr/fr/statistiques/6543200#dictionnaire
+pop_insee <- read_delim("C:/Users/ORTEGA/Downloads/base-ic-evol-struct-pop-2019.CSV",
+                        delim = ";", escape_double = FALSE, trim_ws = TRUE)
+
+## See if there is any missing value
+table(PARCELLE$ID %in% pop_insee$IRIS)
+## Which one are missing
+PARCELLE[!ID %in% pop_insee$IRIS]
+
+## Rename IRIS in ID in pop_insee
+setnames(pop_insee, "IRIS", "ID")
+
+## Merge pop_insee and merged_PARCELLES
+merged_PARCELLES <- merge(merged_PARCELLES, pop_insee[,c("ID", "P19_POP")], by = "ID")
+setnames(merged_PARCELLES, "P19_POP", "POP_INSEE")
+
+## Compare Pop2018 and P19_POP
+plot(merged_PARCELLES$POP_INSEE, merged_PARCELLES$Pop2018, main = "POP_INSEE vs Pop2018", xlab = "POP_INSEE", ylab = "Pop2018")
+
+# Final Kfix & Kvar
+merged_PARCELLES[,  `:=`(Kfix = ifelse(is.na(Kfix_UA) | (round(sum_areas_UA) < round(SURF_HA)),
+                                       Kfix_CLC,
+                                       Kfix_UA),
+                         Kvar = ifelse(is.na(Kvar_UA) | (round(sum_areas_UA) < round(SURF_HA)),
+                                       Kvar_CLC,
+                                       Kvar_UA),
+                         POP = ifelse(is.na(POP_INSEE),
+                                      Pop2018,
+                                      POP_INSEE))]
+# other method same results
+# # Final Kfix -- TO DO Ewy update
+# merged_PARCELLES$Kfix <- ifelse(!is.na(merged_PARCELLES$Kfix_UA),
+#                                 merged_PARCELLES$Kfix_UA,
+#                                 merged_PARCELLES$Kfix_CLC)
+# # Final Kvar
+# merged_PARCELLES$Kvar <- ifelse(is.na(merged_PARCELLES$Kvar_UA) | (round(merged_PARCELLES$sum_areas_UA) < round(merged_PARCELLES$SURF_HA)),
+#                                 merged_PARCELLES$Kvar_CLC,
+#                                 merged_PARCELLES$Kvar_UA)
+
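+## Added check (a minimal sketch, not part of the original workflow): count how many NAs remain
+## in the final Kfix / Kvar / POP columns after the UA -> CLC and INSEE -> Pop2018 fallbacks
+merged_PARCELLES[, lapply(.SD, function(x) sum(is.na(x))), .SDcols = c("Kfix", "Kvar", "POP")]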
+
+merged_PARCELLES[, `:=`(Kfix_UA = NULL,
+                        Kvar_UA = NULL,
+                        Kfix_CLC = NULL,
+                        Kvar_CLC = NULL,
+                        POP_INSEE = NULL,
+                        Pop2018 = NULL,
+                        sum_areas_UA = NULL,
+                        sum_areas_CLC = NULL)]
+## Save the updated PARCELLE dataset
+PARCELLE <- merged_PARCELLES
+save(PARCELLE, file = "data/PARCELLE.rda") # lower-case extension to match the existing data/PARCELLE.rda
+
diff --git a/work_in_progress/shared_data/CC.Rda b/work_in_progress/shared_data/CC.Rda
new file mode 100644
index 0000000000000000000000000000000000000000..c9fca4ef90f49ed39ef1cdbba002f8ea7deb7f78
Binary files /dev/null and b/work_in_progress/shared_data/CC.Rda differ
diff --git a/work_in_progress/test_intersect.R b/work_in_progress/test_intersect.R
new file mode 100644
index 0000000000000000000000000000000000000000..ec21bb418adfcf41ff56e693acd419652e29e0d9
--- /dev/null
+++ b/work_in_progress/test_intersect.R
@@ -0,0 +1,30 @@
+x1 <- vect("POLYGON ((0 0, 8 0, 8 9, 0 9, 0 0))")
+x2 <- vect("POLYGON ((10 4, 12 4, 12 7, 11 7, 11 6, 10 6, 10 4))")
+
+y1 <- vect("POLYGON ((5 6, 15 6, 15 15, 5 15, 5 6))")
+y2 <- vect("POLYGON ((8 2, 9 2, 9 3, 8 3, 8 2))")
+y3 <- vect("POLYGON ((2 6, 3 6, 3 8, 2 8, 2 6))")
+y4 <- vect("POLYGON ((2 12, 3 12, 3 13, 2 13, 2 12))")
+
+x <- rbind(x1, x2)
+values(x) <- data.frame(xid=1:2)
+crs(x) <- "+proj=utm +zone=1"
+
+y <- rbind(y1, y2, y3, y4)
+values(y) <- data.frame(yid=letters[1:4])
+crs(y) <- "+proj=utm +zone=1"
+
+plot(rbind(x, y), border=c(rep("red",2), rep("blue", 4)), lwd=2)
+text(x, "xid")
+text(y, "yid")
+
+v <- combineGeoms(x, y)
+plot(v, col=c("red", "blue"))
+
+v <- combineGeoms(x, y, boundary=FALSE, maxdist=1, minover=.05)
+plot(v, col=rainbow(4))
+
+
+## URBAN_ATLAS and PARCELLES are created in work_in_progress/Carrying capacity.R
+v <- intersect(URBAN_ATLAS, PARCELLES)
+v <- intersect(PARCELLES, URBAN_ATLAS)
+plot(v, col=rainbow(4))