# MONTE CARLO SIMULATION — LCOE, 6 CENTRALES ROD
#
# CAPITAL COST: LOG-NORMAL
# Deterministic capital cost (USD); a +/-30% uniform band is drawn first,
# then a log-normal is fitted to the log-moments of that band.
IC <- 41600000
IC
ICs <- runif(1000000, min = IC * (1 - 0.3), max = IC * (1 + 0.3))
# Spell out meanlog/sdlog (the original relied on partial matching of
# `mean=`/`sd=`) and pass the draw count explicitly.
sIC <- rlnorm(length(ICs), meanlog = mean(log(ICs)), sdlog = sd(log(ICs)))
#
# INFLATION RATE: NORMAL
ir <- 0.033
irs <- runif(1000000, min = ir * (1 - 0.3), max = ir * (1 + 0.3))
sir <- rnorm(length(irs), mean = mean(irs), sd = sd(irs))
#
# DISCOUNT RATE: NORMAL
dr <- 0.125
drs <- runif(1000000, min = dr * (1 - 0.3), max = dr * (1 + 0.3))
sdr <- rnorm(length(drs), mean = mean(drs), sd = sd(drs))
# O&M COST
# SIMULATION: each of the 20 yearly terms draws ONE inflation and ONE
# discount sample ("une et une seule fois par parametre dans l'equation"),
# producing a single O&M NPV scenario; a +/-30% band around it is then
# resampled and a normal is fitted to that band.
# The 20 hand-written terms are replaced by a sum over t = 1..20; the
# sample() calls occur in the same order, so the RNG stream is unchanged.
OM <- sum(vapply(1:20, function(t) {
  (0.06 * IC) / (((1 + sample(sir, 1, replace = FALSE)) /
                  (1 + sample(sdr, 1, replace = FALSE)))^t)
}, numeric(1)))
OM
OMs <- runif(1000000, min = OM * (1 - 0.3), max = OM * (1 + 0.3))
sOM <- rnorm(length(OMs), mean = mean(OMs), sd = sd(OMs))
# CAPACITY FACTOR: UNIFORM
CF <- 0.387121134174565
CF
sCF <- runif(1000000, min = CF * (1 - 0.3), max = CF * (1 + 0.3))
#
# DEGRADATION RATE: UNIFORM
DR <- 0.04
DR
sDR <- runif(1000000, min = DR * (1 - 0.3), max = DR * (1 + 0.3))
# ENERGY OUTPUT over a 20-year lifetime (20 000 kW plant, 8760 h/year).
annual_energy <- sCF * 20000 * 8760
# EO1: zero degradation — every yearly term equals the annual production
# (the original wrote 20 explicit terms divided by (1+0)^t).
EO1 <- 0
for (t in 1:20) {
  EO1 <- EO1 + annual_energy / (1 + 0)^t
}
# EO2: yearly production degraded at the simulated rate sDR.
EO2 <- 0
for (t in 1:20) {
  EO2 <- EO2 + annual_energy / (1 + sDR)^t
}
# ENVIRONMENTAL IMPACT COST SCENARIOS
#
# LIFETIME: UNIFORM (kept disabled, as in the original analysis)
# LF <- 20; LF
# LFs <- runif(1000000, min = LF * (1 - 0.3), max = LF * (1 + 0.3))
# sLF <- runif(1000000, min = LF * (1 - 0.3), max = LF * (1 + 0.3))
# DECOMPOSED EQUATION
#
# ENVIRONMENTAL IMPACT (cost per tonne): UNIFORM
IE <- 36.3
IE
sIE <- runif(1000000, min = IE * (1 - 0.3), max = IE * (1 + 0.3))
#
# EMISSION FACTOR DIESEL: UNIFORM
EFD <- 0.277
EFD
sEFD <- runif(1000000, min = EFD * (1 - 0.3), max = EFD * (1 + 0.3))
#
# EMISSION FACTOR GAS: UNIFORM
EFG <- 0.2
EFG
# BUG FIX: the original upper bound was EFD + EFG*0.3 (mixed up EFD/EFG);
# the band is +/-30% around EFG, symmetric with the other parameters.
sEFG <- runif(1000000, min = EFG * (1 - 0.3), max = EFG * (1 + 0.3))
# Keep the zero-degradation case (EO1) for the diesel environmental cost.
# NOTE(review): parentheses fixed so the yearly discount factor is
# ((1 + sir)/(1 + sdr))^t, consistent with the O&M formula and the disabled
# a/b computation below; the original wrote (1+sir)/(1+sdr)^t, which raises
# only the denominator to the power t.
IED <- 0
for (t in 1:20) {
  IED <- IED + (((sEFD * EO1) / 1000) * sIE) / (((1 + sir) / (1 + sdr))^t)
}
IEG <- 0
for (t in 1:20) {
  IEG <- IEG + (((sEFG * EO2) / 1000) * sIE) / (((1 + sir) / (1 + sdr))^t)
}
# Not reused within a single simulation
#
# FIXED PARAMETERS CONSIDERED:
# Turbine capacity
# Rotor diameter
# Number of turbines
# Tower concept
# Weibull k factor
# Shear exponent
# Altitude
# Air density
# Cut-in wind speed
# Cut-out wind speed
# Energy losses
# Availability
# Net AEP (MWh/year)
#_____________________________________________________________________________________________________________
# LCOE SIMULATION: MONTE CARLO
#-------------------------------------------------------------------------------------------------------------
# Each sLCOE scenario is simulated once and only once ("une et une seule
# fois") to avoid one parameter's distribution masking the others.
# Scenarios: 1-2 without environmental credit, 3-4 with diesel credit (IED),
# 5-6 with gas credit (IEG); odd = no degradation (EO1), even = degraded (EO2).
sLCOE1 <- (sIC + sOM) / EO1
#
sLCOE2 <- (sIC + sOM) / EO2
sLCOE3 <- (sIC + sOM - IED) / EO1
sLCOE4 <- (sIC + sOM - IED) / EO2
sLCOE5 <- (sIC + sOM - IEG) / EO1
sLCOE6 <- (sIC + sOM - IEG) / EO2
# library(psych)
# describe(sLCOE1)
# c("item", "name", "item number", "nvalid", "mean", "sd", "median", "mad",
#   "min", "max", "skew", "kurtosis", "se")
# sLCOE1 <- runif(1000000, min = min(LCOE1s), max = max(LCOE1s))
# VERIFICATION: the deterministic value of each input should fall inside
# the range of its simulated distribution.
cbind(summary(sIC))
# should contain 41 600 000
cbind(summary(sOM))
# should contain 137 611 390
cbind(summary(EO1))
# should contain 1 457 709 447
cbind(summary(sLCOE1))
cbind(summary(sLCOE2))
cbind(summary(sLCOE3))
cbind(summary(sLCOE4))
cbind(summary(sLCOE5))
# NOTE(review): sLCOE6 is defined above but never summarised here — confirm.
# Disabled check computation — every line is now commented out (the original
# left the continuation lines un-commented, which partially executed and
# then produced a syntax error).
# a = sIC + ( (0.06*ICs)/(((1+sir)/(1+sdr))^1) +
# (0.06*ICs)/(((1+sir)/(1+sdr))^2)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^3)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^4)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^5)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^6)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^7)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^8)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^9)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^10)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^11)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^12)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^13)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^14)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^15)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^16)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^17)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^18)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^19)+
# (0.06*ICs)/(((1+sir)/(1+sdr))^20) )
# b = (sCF*20000*8760) /((1+0)^1) +
# (sCF*20000*8760) /((1+0)^2)+
# (sCF*20000*8760) /((1+0)^3)+
# (sCF*20000*8760) /((1+0)^4)+
# (sCF*20000*8760) /((1+0)^5)+
# (sCF*20000*8760) /((1+0)^6)+
# (sCF*20000*8760) /((1+0)^7)+
# (sCF*20000*8760) /((1+0)^8)+
# (sCF*20000*8760) /((1+0)^9)+
# (sCF*20000*8760) /((1+0)^10)+
# (sCF*20000*8760) /((1+0)^11)+
# (sCF*20000*8760) /((1+0)^12)+
# (sCF*20000*8760) /((1+0)^13)+
# (sCF*20000*8760) /((1+0)^14)+
# (sCF*20000*8760) /((1+0)^15)+
# (sCF*20000*8760) /((1+0)^16)+
# (sCF*20000*8760) /((1+0)^17)+
# (sCF*20000*8760) /((1+0)^18)+
# (sCF*20000*8760) /((1+0)^19)+
# (sCF*20000*8760) /((1+0)^20)
# k = a/b
# GRAPH RESULTS: LCOE SIMULATION (1,000,000 iterations by default)
library(ggplot2)
# hp <- qplot(x = sLCOE1, fill = ..count.., geom = "histogram")
# hp + scale_fill_gradient(low = "red", high = "green") + theme_classic() +
#   theme_linedraw()
# NOTE(review): the qplot() creating `hp` was already commented out, so the
# `hp + ...` line could not run; it is kept disabled together with it.
# BUG FIX below: `xalb` typo corrected to `xlab`.
hist(sLCOE1, prob = TRUE, xlab = NA, ylab = NA, main = NA)
# Quality of the MC histogram; the simulation can be rerun for a good spread.
plot(-1, -1, xlim = c(0.05, 0.29), ylim = c(0, 13),
     xlab = "YOBOKI Wind LCOE ($/KWh)",
     ylab = "Probability\ndensity function (%)")
# Widen the plotting window before drawing
grid(col = "grey2", lty = "solid", lwd = 0.1)
# grid(NULL, NA)
# 5 TIMES
par(new = TRUE)
plot(-1, -1, xlim = c(0.05, 0.29), ylim = c(0, 13),
     xlab = "YOBOKI Wind LCOE ($/KWh)",
     ylab = "Probability\ndensity function (%)")
# Spread out the graph for display
# breaks = "FD"
par(new = TRUE)
hist(sLCOE1, add = TRUE, prob = TRUE, border = "yellow", breaks = 50,
     col = "blue3", main = NA, xlab = NA, ylab = NA, axes = FALSE)
lines(density(sLCOE1), col = "red", lty = 1, lwd = 1)
# QUANTILES OF THE MONTE CARLO LCOE RESULT
cbind(quantile(sLCOE1, c(0.05, 0.50, 0.95)))
numb1 <- cbind(quantile(sLCOE1, c(0.05, 0.50, 0.95)))
library(scales); scientific(numb1, digits = 3)
# Quantile markers (values taken from a previous run)
q1 <- 0.09896579
abline(v = q1, lwd = 0.5, col = "darkred", lty = 6)
# legend(q1, 70, "5%= 3.718e-4", cex = 0.75, bg = "white")
q2 <- 0.14247902
# abline(v = q2, lwd = 0.5, col = "darkblue", lty = 6)
# legend(q2, 70, "50%= 2.342e-3", cex = 0.75, bg = "white")
q3 <- 0.20895879
abline(v = q3, lwd = 0.5, col = "darkgreen", lty = 6)
# legend(q3, 70, "95%=4.314e-3", cex = 0.75, bg = "white")
cbind(round(summary(sLCOE1), 4)); round(sd(sLCOE1), 4)
# LEGEND GRAPH
plot(-1, -1, xlim = c(0.05, 0.25), ylim = c(0, 30),
     xlab = "YOBOKI Wind LCOE ($/KWh)",
     ylab = "Probability\ndensity function (%)")
legend("topright", c("Degradation\n(%) "), text.font = c(2),
       fill = c(topo.colors(500)), bty = "n")
legend("right", c("Density"), text.font = c(2), col = "red",
       lty = 5, lwd = 3, bty = "n")
# library(wesanderson); names(wes_palettes)
# wes_palette("Zissou1", 10, type = "continuous")
# Also: heat.colors(n), terrain.colors(n), topo.colors(n), cm.colors(n)
# hist(sLCOE, xlim = c(0, 0.10), ylim = c(0, 75), breaks = 170, prob = TRUE,
#      main = NA, xlab = " LCOE ($/KW) ", ylab = NA, col = heat.colors(70),
#      border = "darkslategrey")
# require(lattice)
# histogram(~sLCOE, xlim = c(0, 0.10), breaks = 170, prob = TRUE, main = NA,
#           xlab = " LCOE ($/KW) ", ylab = NA, col = heat.colors(70),
#           border = "darkslategrey")
#
# GRAPH: RANK CORRELATION SENSITIVITY
#
rc <- data.frame(sLCOE1, sIC, sir, sdr, sCF, sDR, sOM, sIE, sEFD, sEFG)
sp <- cor(rc, rc$sLCOE1, method = "spearman"); sp
# method = "spearman", alternative = "two.sided"
# Inputs are neither log-transformed nor centred/scaled
# Graph can also be produced with Matlab
#
colfunc <- colorRampPalette(c("red", "yellow", "springgreen", "royalblue"))
# COLOURING
plot(-100, 100, xlim = c(-10, 10), xlab = "Rank Correlation",
     yaxt = "n", xaxt = "n", ylab = NA)
par(new = TRUE)
#
barplot(c(sp), horiz = TRUE, col = colfunc(7))
#
grid(NULL, NA, col = "gray", lty = "dotted", lwd = par("lwd"),
     equilogs = TRUE)  # NULL or NA or c()
# CAN USE AN EXCEL GRAPH INSTEAD
# GRAPH: PLOT-LINE SENSITIVITY
pl <- data.frame(median(sIC), median(sir), median(sdr), median(sCF),
                 median(sDR), median(sOM)); cbind(pl)
# Medians shifted by -15% (one-at-a-time sensitivity points)
median(sIC) + (median(sIC) * -0.15)
median(sir) + (median(sir) * -0.15)
median(sdr) + (median(sdr) * -0.15)
median(sDR) + (median(sDR) * -0.15)
median(sCF) + (median(sCF) * -0.15)
# CAN USE EXCEL: deterministic (better) or stochastic
# Better to use the mean in the literature
# apply(), lapply(), sapply(), tapply()
cbind(sapply(rc, median))
# TORNADO DIAGRAM SENSITIVITY
# With standard deviation
library(ggplot2)
library(plyr)
library(dplyr)
library(tidyverse)
library(likert)
data(pisaitems)
# NOTE(review): `items28` is needed by the likert() call below but its
# definition was commented out in the original; restored here.
items28 <- pisaitems[, substr(names(pisaitems), 1, 5) == "ST24Q"]
p <- likert(items28); plot(p)
# Standard deviations feeding the tornado diagram
sd(sIC)
sd(sir)
sd(sdr)
sd(sDR)
sd(sCF)
sd(sOM)
# https://www.youtube.com/watch?v=U3hTbYnTGGM
# https://www.youtube.com/watch?v=J9RZWI9cq4Y
# CONTOUR DIAGRAM SENSITIVITY
# Candidate axes: sIC, sir, sdr, sCF, sDR, sOM
# NOTE(review): locals renamed from a/b/c to avoid shadowing base::c().
xvar <- sdr
yvar <- sir
zvar <- sLCOE1
x <- c(min(xvar), min(xvar), min(xvar), median(xvar), median(xvar),
       median(xvar), max(xvar), max(xvar), max(xvar))
y <- c(min(yvar), min(yvar), min(yvar), median(yvar), median(yvar),
       median(yvar), max(yvar), max(yvar), max(yvar))
z <- c(min(zvar), min(zvar), min(zvar), median(zvar), median(zvar),
       median(zvar), max(zvar), max(zvar), max(zvar))
df <- data.frame(x = x, y = y, z = z)
library(plotly)
p <- plot_ly(data = df, x = ~x, y = ~y, z = ~z, type = "contour",
             colorscale = "Jet"); p
# BASELINE CASE
# NOTE(review): rm(list = ls()) wipes everything simulated above — apparently
# deliberate, since the analysis below starts from a fresh clipboard dataset.
rm(list = ls())
data <- read.table(file("clipboard"), header = TRUE, sep = "\t", dec = ",")
str(data)
# View(data)
summary(data)
data2 <- data[data$W90 >= 1, ]
summary(data2)
library(ggplot2)
library(ggthemes)
library(viridis)
library(scales)
library(tidyr)
attach(data)  # NOTE(review): attach() is discouraged; kept for script parity
gg <- ggplot(data, aes(x = hour, y = month, fill = W85)) +
  geom_tile(color = "white", size = 0.1) +
  scale_x_discrete(expand = c(0, 0)) +
  scale_y_discrete(expand = c(0, 0)) +
  scale_fill_viridis(name = "# of\ncalls", option = "magma") +
  coord_equal() +
  # NOTE(review): `stationid` is not defined for this dataset — confirm
  labs(x = "hour", y = NULL, title = sprintf("", stationid)) +
  theme_tufte(base_family = "Helvetica") +
  theme(axis.text.x = element_text(angle = 45, vjust = 1, hjust = 1))
gg
# Palettes: inferno viridis cividis rocket mako plasma magma turbo
library(ggplot2)
p <- ggplot(data2, aes(hour, month, fill = W90)) +
  geom_tile(colour = "white") +
  scale_fill_gradient(low = "blue", high = "green") +
  xlab("") + ylab("") + ggtitle("") +
  labs(fill = "wind speed (m/s)")
p
# low=red medium=purple high=blue
# http://www.columbia.edu/~sg3637/blog/Time_Series_Heatmaps.html
# Method 2: ggplot
p <- ggplot(data, aes(x = hour, y = month, fill = W90)) +
  geom_tile()
p
# Save plot to working directory: ggsave(p, filename = "measles-basic.png")
# https://www.royfrancis.com/a-guide-to-elegant-tiled-heatmaps-in-r-2019/
ggp <- ggplot(data2, aes(hour, month)) +
  geom_tile(aes(fill = W90))
# BUG FIX: the `+` continuation must end the previous line; a leading `+`
# applies unary plus to the scale and fails.
ggp +
  scale_fill_gradient(low = "green", high = "blue")
# https://statisticsglobe.com/heatmap-in-r
library(ggplot2)
library(viridis)
# Plot
ggplot(data, aes(hour, month, fill = w80m)) +
  geom_raster() +
  coord_fixed(expand = TRUE) +
  scale_fill_viridis()
X11()
ggplot(data, aes(hour, month, fill = w80m)) +
  geom_raster(interpolate = TRUE) +
  coord_fixed(expand = FALSE) +
  scale_fill_viridis()
statno <- unique(data$stationid)
library(ggplot2)
library(dplyr)       # easier data wrangling
library(viridis)     # colour-blind friendly palette, works in B&W also
library(Interpol.T)  # will generate a large dataset on initial load
library(lubridate)   # for easy date manipulation
library(ggExtra)     # because remembering ggplot theme options is beyond me
library(tidyr)
names(data)
# BUG FIX: the `+` before scale_fill_viridis() started a line in the
# original, detaching it from the chain; joined here.
p <- ggplot(data, aes(day, hour, fill = W90)) +
  geom_tile(color = "white", size = 0.1) +
  scale_fill_viridis(name = "", option = "C")
p <- p + facet_grid(year ~ month)
p <- p + scale_y_continuous(trans = "reverse", breaks = unique(data$hour))
p <- p + scale_x_continuous(breaks = c(1, 10, 20, 31))
p <- p + theme_minimal(base_size = 7)
p <- p + labs(title = paste(""), x = "Day", y = "Hour")
p <- p + theme(legend.position = "bottom") +
  theme(plot.title = element_text(size = 14)) +
  theme(axis.text.y = element_text(size = 6)) +
  theme(strip.background = element_rect(colour = "white")) +
  theme(plot.title = element_text(hjust = 0)) +
  theme(axis.ticks = element_blank()) +
  theme(axis.text = element_text(size = 7)) +
  theme(legend.title = element_text(size = 7)) +
  theme(legend.text = element_text(size = 6)) +
  removeGrid()
p
library(ggplot2)
library(dplyr)       # easier data wrangling
library(viridis)     # colour-blind friendly palette, works in B&W also
library(Interpol.T)  # will generate a large dataset on initial load
library(lubridate)   # for easy date manipulation
library(ggExtra)     # because remembering ggplot theme options is beyond me
library(tidyr)
# NOTE(review): data() loads h_d_t as a side effect; the assignment only
# stores the dataset's name, not the data itself.
data <- data(Trentino_hourly_T, package = "Interpol.T")
names(h_d_t)[1:5] <- c("stationid", "date", "hour", "temp", "flag")
# BUG FIX throughout this section: `%>%` must end a line, not begin one.
df <- tbl_df(h_d_t) %>%
  filter(stationid == "T0001")
df <- df %>% mutate(year = year(date),
                    month = month(date, label = TRUE),
                    day = day(date))
df$date <- ymd(df$date)
# Not necessary for the plot, but useful if you want to do further work
# with the data.
# Cleanup
rm(list = c("h_d_t", "mo_bias", "Tn", "Tx",
            "Th_int_list", "calibration_l",
            "calibration_shape", "Tm_list"))
# Create plotting df
df <- df %>% select(stationid, day, hour, month, year, temp) %>%
  fill(temp)  # optional - see note below
View(df)
statno <- unique(df$stationid)
# Plotting starts here
p <- ggplot(df, aes(day, hour, fill = temp)) +
  geom_tile(color = "white", size = 0.1) +
  scale_fill_viridis(name = "Hrly Temps\nC", option = "C")
p <- p + facet_grid(year ~ month)
p <- p + scale_y_continuous(trans = "reverse", breaks = unique(df$hour))
p <- p + scale_x_continuous(breaks = c(1, 10, 20, 31))
p <- p + theme_minimal(base_size = 8)
p <- p + labs(title = paste("Hourly Temps - Station", statno),
              x = "Day", y = "Hour Commencing")
p <- p + theme(legend.position = "bottom") +
  theme(plot.title = element_text(size = 14)) +
  theme(axis.text.y = element_text(size = 6)) +
  theme(strip.background = element_rect(colour = "white")) +
  theme(plot.title = element_text(hjust = 0)) +
  theme(axis.ticks = element_blank()) +
  theme(axis.text = element_text(size = 7)) +
  theme(legend.title = element_text(size = 8)) +
  theme(legend.text = element_text(size = 6)) +
  removeGrid()  # ggExtra
p
# awesomeness
library("lattice")
# Dummy data
data <- expand.grid(X = hour, Y = month)
## Try it out
levelplot(w80m ~ X * Y, data = data,
          xlab = "X", main = "")
# 3) Air Passengers #3
# NOTE(review): heatmap.2() comes from the gplots package, which is not
# loaded anywhere in this script — confirm library(gplots) is intended.
heatmap.2(air_data,
          trace = "none",
          xlab = "month",
          ylab = "year",
          main = "Air Passengers #3",
          density.info = "histogram",
          dendrogram = "column",
          keysize = 1.8)
#
# BIG DATA MANIPULATION
# https://rpubs.com/justmarkham/dplyr-tutorial
# Load packages
suppressMessages(library(dplyr))
library(hflights)
# Explore data
data(hflights)
head(hflights)
# Convert to local data frame
flights <- tbl_df(hflights)
# Printing only shows 10 rows and as many columns as can fit on your screen
flights
# You can specify that you want to see more rows
print(flights, n = 20)
# Convert to a normal data frame to see all of the columns
data.frame(head(flights))
# Base R approach to view all flights on January 1
flights[flights$Month == 1 & flights$DayofMonth == 1, ]
# dplyr approach
# note: you can use comma or ampersand to represent AND condition
filter(flights, Month == 1, DayofMonth == 1)
# Use pipe for OR condition
filter(flights, UniqueCarrier == "AA" | UniqueCarrier == "UA")
# You can also use the %in% operator
filter(flights, UniqueCarrier %in% c("AA", "UA"))
# Base R approach to select DepTime, ArrTime, and FlightNum columns
flights[, c("DepTime", "ArrTime", "FlightNum")]
# dplyr approach
select(flights, DepTime, ArrTime, FlightNum)
# Use colon to select multiple contiguous columns, and use `contains` to
# match columns by name
# note: `starts_with`, `ends_with`, and `matches` (for regular expressions)
# can also be used to match columns by name
select(flights, Year:DayofMonth, contains("Taxi"), contains("Delay"))
# Nesting method to select UniqueCarrier and DepDelay columns and filter
# for delays over 60 minutes
filter(select(flights, UniqueCarrier, DepDelay), DepDelay > 60)
# Chaining method (BUG FIX throughout: `%>%` must end a line, not begin one)
flights %>%
  select(UniqueCarrier, DepDelay) %>%
  filter(DepDelay > 60)
# Create two vectors and calculate Euclidian distance between them
x1 <- 1:5; x2 <- 2:6
sqrt(sum((x1 - x2)^2))
# Chaining method
(x1 - x2)^2 %>% sum() %>% sqrt()
# Base R approach to select UniqueCarrier and DepDelay columns and sort by
# DepDelay
flights[order(flights$DepDelay), c("UniqueCarrier", "DepDelay")]
# dplyr approach
flights %>%
  select(UniqueCarrier, DepDelay) %>%
  arrange(DepDelay)
# Use `desc` for descending
flights %>%
  select(UniqueCarrier, DepDelay) %>%
  arrange(desc(DepDelay))
# Base R approach to create a new variable Speed (in mph)
flights$Speed <- flights$Distance / flights$AirTime * 60
flights[, c("Distance", "AirTime", "Speed")]
# dplyr approach (prints the new variable but does not store it)
flights %>%
  select(Distance, AirTime) %>%
  mutate(Speed = Distance / AirTime * 60)
# Store the new variable
flights <- flights %>% mutate(Speed = Distance / AirTime * 60)
# Base R approaches to calculate the average arrival delay per destination
head(with(flights, tapply(ArrDelay, Dest, mean, na.rm = TRUE)))
head(aggregate(ArrDelay ~ Dest, flights, mean))
# dplyr approach: create a table grouped by Dest, and then summarise each
# group by taking the mean of ArrDelay
flights %>%
  group_by(Dest) %>%
  summarise(avg_delay = mean(ArrDelay, na.rm = TRUE))
# For each carrier, calculate the percentage of flights cancelled/diverted
flights %>%
  group_by(UniqueCarrier) %>%
  summarise_each(funs(mean), Cancelled, Diverted)
# For each carrier, calculate the minimum and maximum arrival and departure
# delays
flights %>%
  group_by(UniqueCarrier) %>%
  summarise_each(funs(min(., na.rm = TRUE), max(., na.rm = TRUE)),
                 matches("Delay"))
# For each day of the year, count the total number of flights and sort in
# descending order
flights %>%
  group_by(Month, DayofMonth) %>%
  summarise(flight_count = n()) %>%
  arrange(desc(flight_count))
# Rewrite more simply with the `tally` function
flights %>%
  group_by(Month, DayofMonth) %>%
  tally(sort = TRUE)
# For each destination, count the total number of flights and the number of
# distinct planes that flew there
flights %>%
  group_by(Dest) %>%
  summarise(flight_count = n(), plane_count = n_distinct(TailNum))
# For each destination, show the number of cancelled/not cancelled flights
flights %>%
  group_by(Dest) %>%
  select(Cancelled) %>%
  table() %>%
  head()
# For each carrier, calculate which two days of the year they had their
# longest departure delays
# note: smallest (not largest) value is ranked as 1, so you have to use
# `desc` to rank by largest value
flights %>%
  group_by(UniqueCarrier) %>%
  select(Month, DayofMonth, DepDelay) %>%
  filter(min_rank(desc(DepDelay)) <= 2) %>%
  arrange(UniqueCarrier, desc(DepDelay))
# Rewrite more simply with the `top_n` function
flights %>%
  group_by(UniqueCarrier) %>%
  select(Month, DayofMonth, DepDelay) %>%
  top_n(2) %>%
  arrange(UniqueCarrier, desc(DepDelay))
# For each month, calculate the number of flights and the change from the
# previous month
flights %>%
  group_by(Month) %>%
  summarise(flight_count = n()) %>%
  mutate(change = flight_count - lag(flight_count))
# Rewrite more simply with the `tally` function
flights %>%
  group_by(Month) %>%
  tally() %>%
  mutate(change = n - lag(n))
# Randomly sample a fixed number of rows, without replacement
flights %>% sample_n(5)
# Randomly sample a fraction of rows, with replacement
flights %>% sample_frac(0.25, replace = TRUE)
# Base R approach to view the structure of an object
str(flights)
# dplyr approach: better formatting, and adapts to your screen width
glimpse(flights)
# Connect to an SQLite database containing the hflights data
my_db <- src_sqlite("my_db.sqlite3")
# Connect to the "hflights" table in that database
flights_tbl <- tbl(my_db, "hflights")
# Example query with our data frame
flights %>%
  select(UniqueCarrier, DepDelay) %>%
  arrange(desc(DepDelay))
# Identical query using the database
flights_tbl %>%
  select(UniqueCarrier, DepDelay) %>%
  arrange(desc(DepDelay))
# Send SQL commands to the database
tbl(my_db, sql("SELECT * FROM hflights LIMIT 100"))
# Ask dplyr for the SQL commands
flights_tbl %>%
  select(UniqueCarrier, DepDelay) %>%
  arrange(desc(DepDelay)) %>%
  explain()
#
# https://rpubs.com/justmarkham/dplyr-tutorial
# Annex of R program execution
attach(data)
names(data)
library(plot3D)
x <- Temperature...C.
y <- Wind.speed..80m.
z <- Pression
# 3D scatter via plot3D (rgl-style view)
scatter3D(x = x, y = y, z = z, bty = "b2", pch = 20, cex = 2,
          ticktype = "detailed")
# Box types: "b", "b2", "f", "g", "bl", "bl2", "u", "n"
# DIURNAL MEAN HOURS PER MONTH
# Days per month: J1(31) F(28/29) M1(31) A1(30) M2(31) J2(30) J3(31) A2(31)
# S(30) O(31) N(30) D(31)
# Helper: concatenate the daily variables <prefix>1..<prefix>n (replaces
# the 28-31-term c(dj1, ..., dj31) literals). inherits = TRUE reproduces
# the original name lookup through the search path.
month_concat <- function(prefix, n) {
  unlist(mget(paste0(prefix, seq_len(n)), envir = globalenv(),
              inherits = TRUE),
         use.names = FALSE)
}
jan <- month_concat("dj", 31)
feb <- month_concat("df", 28)
mar <- month_concat("dm", 31)
apr <- month_concat("da", 30)
mai <- month_concat("dmm", 31)
jun <- month_concat("djj", 30)
jul <- month_concat("jd", 31)
# NOTE(review): August reuses April's da* variables and additionally needs
# da31 — confirm the daily variables for April and August are named
# distinctly (this mirrors the original code).
aou <- month_concat("da", 31)
sep <- month_concat("dsep", 30)
oct <- month_concat("do", 31)
nov <- month_concat("dn", 30)
dec <- month_concat("dd", 31)
rbind(mean(jan),mean(feb),mean(mar),mean(apr),mean(mai),mean(jun),mean(jul),mean(aou),mean(sep),mean(oct),mean(nov),mean(dec))
mean(data$w80)
rbind(min(jan),min(feb),min(mar),min(apr),min(mai),min(jun),min(jul),min(aou),min(sep),min(oct),min(nov),min(dec))
min(data$w80)
rbind(max(jan),max(feb),max(mar),max(apr),max(mai),max(jun),max(jul),max(aou),max(sep),max(oct),max(nov),max(dec))
max(data$w80)
var <- c(jan, feb, mar, apr, mai, jun, jul, aou, sep, oct, nov, dec)
sea <- c(mean(jan), mean(feb), mean(mar), mean(apr), mean(mai), mean(jun),
         mean(jul), mean(aou), mean(sep), mean(oct), mean(nov), mean(dec))
# Dump each month to its own text file
months <- list(jan = jan, feb = feb, mar = mar, apr = apr, mai = mai,
               jun = jun, jul = jul, aou = aou, sep = sep, oct = oct,
               nov = nov, dec = dec)
for (m in names(months)) {
  write.table(months[[m]], paste0(m, ".txt"))
}
plot(var, type = "l")
plot(sea, type = "l")
boxplot(jan, feb, mar, apr, mai, jun, jul, aou, sep, oct, nov, dec,
        col = rainbow(12))
# Monthly wind-speed quantiles (10%, 25%, 75%, 90%).
# BUG FIX: the original assigned BOTH January's and June's quantiles to
# `qj`, so January's quantiles were overwritten before being used below;
# June now gets its own name `qjun`.
qj   <- cbind(quantile(jan, probs = c(0.1, 0.25, 0.75, 0.9)))
qf   <- cbind(quantile(feb, probs = c(0.1, 0.25, 0.75, 0.9)))
qm   <- cbind(quantile(mar, probs = c(0.1, 0.25, 0.75, 0.9)))
qa   <- cbind(quantile(apr, probs = c(0.1, 0.25, 0.75, 0.9)))
qm1  <- cbind(quantile(mai, probs = c(0.1, 0.25, 0.75, 0.9)))
qjun <- cbind(quantile(jun, probs = c(0.1, 0.25, 0.75, 0.9)))
qjj  <- cbind(quantile(jul, probs = c(0.1, 0.25, 0.75, 0.9)))
qaa  <- cbind(quantile(aou, probs = c(0.1, 0.25, 0.75, 0.9)))
qs   <- cbind(quantile(sep, probs = c(0.1, 0.25, 0.75, 0.9)))
qo   <- cbind(quantile(oct, probs = c(0.1, 0.25, 0.75, 0.9)))
qn   <- cbind(quantile(nov, probs = c(0.1, 0.25, 0.75, 0.9)))
qd   <- cbind(quantile(dec, probs = c(0.1, 0.25, 0.75, 0.9)))
q <- cbind(c(qj, qf, qm, qa, qm1, qjun, qjj, qaa, qs, qo, qn, qd)); q
# Quantile table pasted back in from the clipboard (columns q0.1..q0.9)
quan <- read.csv(file("clipboard"), header = TRUE, sep = "\t",
                 dec = ".", row.names = 1); str(quan)
plot(sea, type = "l")
plot(-1, -1, xlim = c(0, 12), ylim = c(5, 15),
     xlab = "Month", ylab = "Average Wind speed(m/s)")
grid(col = "black", lty = "solid", lwd = 1.5)
par(new = TRUE); plot(sea, lwd = 1.5, lty = 1, pch = 20, type = "b",
                      axes = FALSE, ylab = NA, xlab = NA)
par(new = TRUE); plot(quan$q0.1, type = "l", lty = 2, lwd = 3, col = "red",
                      axes = FALSE, ylab = NA, xlab = NA)
par(new = TRUE); plot(quan$q0.9, type = "l", lty = 2, lwd = 3, col = "red",
                      axes = FALSE, ylab = NA, xlab = NA)
par(new = TRUE); plot(quan$q0.25, type = "l", lty = 3, lwd = 3, col = "blue",
                      axes = FALSE, ylab = NA, xlab = NA)
par(new = TRUE); plot(quan$q0.75, type = "l", lty = 3, lwd = 3, col = "blue",
                      axes = FALSE, ylab = NA, xlab = NA)
# TIME SERIES ANALYSIS
# NOTE(review): renamed local `ts` to `ts_var` so stats::ts() is not masked.
# frequency = 30 with start = 2015 — confirm the intended sampling period.
ts_var <- ts(var, frequency = 30, start = c(2015))
plot.ts(ts_var)
tsdec <- decompose(ts_var)
plot(tsdec)
# WEEKLY MEANS PER MONTH: weeks 1-3 are days 1-7, 8-14, 15-21; "week 4"
# is the remainder of the month (days 22..n), as in the original.
weekly_means <- function(prefix, n) {
  day_mean <- function(idx) {
    mean(unlist(mget(paste0(prefix, idx), envir = globalenv(),
                     inherits = TRUE),
                use.names = FALSE))
  }
  c(day_mean(1:7), day_mean(8:14), day_mean(15:21), day_mean(22:n))
}
wmjan <- weekly_means("dj", 31)
wmfeb <- weekly_means("df", 28)
wmmar <- weekly_means("dm", 31)
wmapr <- weekly_means("da", 30)
wmmai <- weekly_means("dmm", 31)
wmjun <- weekly_means("djj", 30)
wmjul <- weekly_means("jd", 31)
# NOTE(review): August reuses April's da* names (see diurnal section).
wmaou <- weekly_means("da", 31)
wmsep <- weekly_means("dsep", 30)
wmoct <- weekly_means("do", 31)
wmnov <- weekly_means("dn", 30)
wmdec <- weekly_means("dd", 31)
wmvar <- c(wmjan, wmfeb, wmmar, wmapr, wmmai, wmjun, wmjul, wmaou,
           wmsep, wmoct, wmnov, wmdec)
plot(wmvar, type = "l")
plot(-1, -1, xlim = c(0, 60), ylim = c(0, 16),
     xlab = "Month", ylab = "Average Wind speed(m/s)")
grid(col = "black", lty = "solid", lwd = 1.5)
par(new = TRUE); plot(wmvar, lwd = 1.5, lty = 1, pch = 20, type = "b",
                      col = "red", axes = FALSE, ylab = NA, xlab = NA)
# Days per month: J1(31) F(28/29) M1(31) A1(30) M2(31) J2(30) J3(31) A2(31)
# S(30) O(31) N(30) D(31)
dim(data)  # NEED 52560 rows (one year at 10-minute resolution)
# NOTE(review): the row windows below look inconsistent — e.g. February
# spans 4465:8497 (4033 rows, not 28*144 = 4032) and m5 spans 10465 rows.
# Verify the slice boundaries against the actual timestamp column.
j1 <- data[c(1:4464), ]
f2 <- data[c(4465:8497), ]
m3 <- data[c(8498:12962), ]
a4 <- data[c(12963:17283), ]
m5 <- data[c(17284:27748), ]
wsj <- data[c(1:4464), ]
wsjj <- data[c(1:4320), ]
wsao <- data[c(1:4464), ]
wssep <- data[c(1:4464), ]
wso <- data[c(1:4320), ]
wsn <- data[c(1:4464), ]
wsd <- data[c(1:4320), ]
write.table(j1, "j1.txt")
#
#
# WIND ROSE DIAGRAM
# Station table (speed w60, direction d60) pasted via the clipboard.
goubet<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
# BUG FIX: the wrapped continuation of this commented alternative import had
# lost its "#", leaving a live `dec=",")` line (syntax error); recombined.
#goubet<-read.csv(file("clipboard"),header=T,sep="\t", dec=",")
#
# key.position options: "right", "left", "top", "bottom"
summary(w60)
require(openair)
# Wind rose of 60 m speed/direction with 5-unit grid rings.
windRose(data,
ws = "w60", wd ="d60",
paddle = F, border = T,
breaks = c(0, 4, 8, 12, 16, 21),
key.position = "bottom",
col =c( "blue",
"purple", "pink","red", "darkred"),
grid.line = list(value = 5, lty = 1,
lwd=3, col = "black"), annotate = TRUE,
key.header = "Wind Speed",
angle.scale = 35)
#WindRose.R
#
# POTENTIAL AREA ASSESSMENT
# WIND POWER DENSITY
# Air density at sea level (kg/m^3).
rho=1.225
#PD = rho*(sum(v2^3)/length(v2))/2 = PD/A (W/m^2)
# BUG FIX: "PDA" and "= ..." were on separate lines -- a syntax error in R;
# rejoined into a single assignment.
PDA = (rho* (mean(v2^3)))/2; PDA
# Rotor radius recovered from a swept area of 10024 m^2 (A = pi*R^2).
Radius=sqrt(10024/pi);Radius
Diam=
2*Radius; Diam # 112.9732
# POWER CAPTURED
# Mean power coefficient (midpoint of the 0.35-0.45 range).
cp=(0.35+0.45)/2
# P = 0.5*rho*A*cp*v^3 with A = pi*R^2.
# BUG FIX: the original used Radius^2 alone, omitting pi from the swept area.
PC=0.5*rho*(pi*Radius^2)*cp*(v2^3)
plot(PC,
type="l")
# Potential-area raster values pasted from the clipboard.
pot<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
str(pot)
dim(pot)
summary(pot)
# NOTE(review): `pot1` is not defined in this file -- presumably the same
# raster under another name; confirm.
cbind(summary(pot1$Extract_tif12_Band_1))
attach(pot)
names(pot)
# Wind-power classes (power density W/m^2 <-> wind speed m/s at this site):
#400-500 6.8 - 7.3
#500-600 7.3 - 7.7
#600-800 7.7 - 8.5
#>800        > 8.5
#23703 # dim(pot)
# Bin site wind speeds (WS) by class.
a=WS[WS> 6.8 & WS < 7.29 ]
b=WS[WS> 7.299 & WS < 7.69 ]
c=WS[WS> 7.699 & WS < 8.499 ]
# BUG FIX: threshold was 85 (typo); the ">800 W/m^2" class starts at 8.5 m/s.
d=WS[WS> 8.5 ]
z=WS[WS<6.8]
# Bin power densities (PD) the same way.
A=PD[PD> 400 & PD< 499 ]
B=PD[PD> 499.9 & PD< 599.9]
# BUG FIX: lower bound was 500.9, overlapping class B; the 600-800 class
# starts at 599.9 to match the pattern of the other bounds.
C=PD[PD> 599.9 & PD< 800 ]
D=PD[PD> 800 ]
1825.99 #km2 ARTA (total assessed area)
# CAPACITY FACTOR / INTERMITTENCE FACTOR
# Weibull parameters (moment method) and turbine cut-in / rated / cut-out
# speeds (m/s).
k=kmm;
c=cmm
vi=3;
vr=11.5; vo=25;
# BUG FIX: a line wrap had split this difference into two statements, so
# Pout was only the first exponential term (the "- exp(...)" line evaluated
# separately and was discarded).
Pout= exp(-((vi/c)^k)) - exp(-((vr/c)^k))
Pr=((vr/c)^k) -
((vi/c)^k)
# Weibull-based capacity factor.
CF = (Pout/Pr) -
exp(-(vo/c)^k); CF
#
# WEIBULL DISTRIBUTION
# EXTRAPOLATION OF WIND SPEED | WIND SHEAR COEFFICIENT
# BUG FIX: several wrapped comment fragments below ("1:", "c80", ...) had
# lost their "#" and executed as code (syntax/runtime errors); restored.
#names(data)
#v1=w60
#length(v1)
# Measurement height h1 (m) and target hub height h2 (m).
#h1=60;
h2=90
# Methodology 1: power law with a speed-dependent shear exponent alpha.
#alpha=(0.37-(0.088*log(mean(v1))))/(1-(0.088*log(h1/10)));alpha
#v2=v1*((h2/h1)^alpha);mean(v2)
#write.table(v2,"data.txt")
# Methodology 2: 1/7 power law, plus k/c height-scaling relations.
#v2=(v1*((h2/h1)^(1/7))); ko=2.93 ; co=10.07
#c80=co*(80/40)^(0.37-(0.088*log(co)))/(1-0.088*log(80/10)); c80
#k80=ko*( (1-(0.088*log(40/10)))/(1-(0.088*log(80/10))) ); k80 ;c80
# WEIBULL AND RAYLEIGH DISTRIBUTION:
# Descriptive statistics of the extrapolated wind-speed series v2.
moy=mean(v2)
# NOTE(review): R's sd() already uses the n-1 (sample) denominator; this
# extra n/(n-1) factor looks unintended -- confirm before relying on `sd`.
sd=(sd(v2)*length(v2))/(length(v2)-1)
count=data.frame(table(v2))
# Kurtosis & Skewness (moment-based, n-1 denominator).
# BUG FIX: "Kurtosis & Skewness" was a bare (non-comment) line evaluating
# two undefined objects; kept as a comment only.
skew=sum((v2-moy)^3/sd^3)/(length(v2)-1)
kurt=sum((((v2-moy)^4)/(sd^4))-3)/(length(v2)-1)
min=min(v2)
max=max(v2)
# Summary table. BUG FIX: the label string had an accidental embedded
# newline from the line wrap.
rbind(c("Statistic of v2"),moy,sd,skew,kurt,min,max)
#
# After the parameter-estimation results are available:
# Wind Speed Probability (Weibull) -- characteristic speeds.
k;kmm;kmv;kwasp
c;cmm;cmv;cwasp
kp=k;
cp=c
# Most probable wind speed: c*((k-1)/k)^(1/k).
vmp=cp*
((1-(1/kp))^(1/kp)) ;round(vmp,3)
# Speed carrying maximum energy: c*((k+2)/k)^(1/k).
vmaxe=cp*
((1+(2/kp))^(1/kp)) ;round(vmaxe,3)
# Mean speed from the Weibull fit: c*Gamma(1+1/k).
vmw=cp*gamma((1+(1/kp))) ;round(vmw,3)
#
# GRAPH: POWER/ENERGY DENSITY OF THE TURBINE (R)
#doi:10.3390/app8101757
#
k;kmm;kmv;kwasp #
c;cmm;cmv;cwasp #
kp=k;
cp=c #
#
# DENSITY in terms of area (m^2)
# Air density (kg/m^3).
rho=1.225
#kg/m3 #
# Weibull-based power density: PDW = rho*c^3*Gamma((k+3)/k)/2 (W/m^2), and
# the empirical power density from the measured series.
# BUG FIX: both assignments below had their names and "= ..." on separate
# lines -- a syntax error in R; rejoined.
PDW = (rho* (cp^3)*gamma((kp+3)/kp) ) /2; PDW #
PDA = (rho* (mean(v2^3)))/2; PDA #
#
# -2- First Group of Weibull Distribution Estimators
# Empirical method of Justus: k from the coefficient of variation.
kej=(sd/moy)^-1.086
cej=moy/gamma((1+(1/kej)))
# Maximum-Likelihood Method (one-step update seeded with kej+0.09).
# BUG FIX: bare "Method" header fragments evaluated undefined objects;
# restored as comments.
kmv=1/(sum(v2^(kej+0.09)*log(v2))/sum(v2^(kej+0.09))
-sum(log(v2))/length(v2))
cmv=(sum(v2^kmv)/length(v2))^(1/kmv)
# Power-Density Method (energy pattern factor method).
E=(sum(v2^3)/length(v2))/moy^3
kpd=1+(3.69/E^2)
cpd=moy/gamma((1+(1/kpd)))
# Curve-Fitting Method.
kcf=(0.9874/(sd/moy))^1.0983
ccf=moy/gamma((1+(1/kcf)))
#require(ForestFit)
# Commented example (BUG FIX: its wrapped second half assigned
# alpha/beta/theta live; restored as a comment):
#n<-100; alpha<-2; beta<-2; theta<-3
#data<-rweibull(n,shape=alpha,scale=beta)+theta
#
rbind(kej,kmv,kpd,kcf,cej,cmv,cpd,ccf)
# Empirical method of Justus / moment method.
cej;kej
# Moment-method parameters fixed from a prior fit.
# BUG FIX: the kmm assignment began with a stray ";" on its own line, which
# is a syntax error in R; merged onto one line.
cmm=11.391; kmm=2.681
# Sanity check: the fitted Weibull moments should reproduce mean(v2), sd(v2).
mean(v2);
cmm*gamma(1+(1/kmm)); sd(v2); cmm*
sqrt(gamma(1+(2/kmm))-(gamma(1+(1/kmm))^2 ))
# Multi-Objective Moments (MUOM) -- commented trial runs.
# BUG FIX: the wrapped continuations below had lost their "#", so each
# trial's "c=..." assignment executed live (silently overwriting the Weibull
# scale c) and the "+ L2*(...)" terms raised undefined-object errors.
# NOTE(review): if c=10.20 was meant to be kept, re-enable that pair.
#kmm=(sd/moy)^-1.086
#cmm=moy/gamma((1+(1/kmm)))
#L1=1/3;L2=1/3;L3=1/3
#k=2.645224; c=10.2183
#L1*(c*gamma(1+(1/k))-mean(v2))^2 + L2*(c^2*gamma(1+(2/k))-mean(v2^2))^2 + L3*(c^3*gamma(1+(3/k))-mean(v2^3))^2
#k=2.68012; c=10.2201
#L1*(c*gamma(1+(1/k))-mean(v2))^2 + L2*(c^2*gamma(1+(2/k))-mean(v2^2))^2 + L3*(c^3*gamma(1+(3/k))-mean(v2^3))^2
#k=2.69; c=10.20
#L1*(c*gamma(1+(1/k))-mean(v2))^2 + L2*(c^2*gamma(1+(2/k))-mean(v2^2))^2 + L3*(c^3*gamma(1+(3/k))-mean(v2^3))^2
#
# -3- Second Group of Weibull Distribution Estimators
# Median and Quartiles Method.
cbind(quantile(v2))
# Quartiles read off quantile(v2) above.
# BUG FIX: bare "Q0" / "Q4" marker lines evaluated undefined objects
# (runtime errors); Q0/Q4 are just min/max and are not needed.
Q1 = 7.0880821 # 25%
Q3 = 13.2096076 # 75%
Q2 = 10.3099377 # 50%
#kmq=1.573/log(Q3/Q1)
# k from the interquartile ratio, c from the median.
kmq=log(log(1/4)/log(3/4))/log(Q3/Q1)
cmq=Q2/log(2)^(1/kmq)
#*====================* Least-Squares (Graphical) Method
# Fit ln(-ln(1-F)) = k*ln(v) - k*ln(c) by OLS.
# BUG FIX: the empirical CDF was stored in `F`, masking base R's F/FALSE
# shorthand used throughout this script (axes=F, prob=F, ...) -- renamed Fls.
Fls=1-exp(-(v2/(cmm+0.09))^(kmm+0.09))
x = log(v2)
y =log(-log(1-Fls))
ls=lm(y~x);summary(ls)
# Slope/intercept transcribed from summary(ls) above.
# NOTE(review): hard-coded; re-run and update if the data change.
kls= 2.781e+00
Intercept= -6.752e+00
cls=exp(-(Intercept/kls))
# Modified Maximum-Likelihood Method (on binned data).
frq=hist(v2);
dev.off()
# Original commented variant; its wrapped second line had lost the "#" and
# carried an unbalanced ")" (a syntax error) -- restored as a comment:
#kmml=1/(sum((frq$mids^(kmv+0.09))*log(frq$mids)*frq$density)/sum((frq$mids^(kmv+0.09))*frq$density) -sum(log(frq$mids)*frq$density)/sum(frq$density))
#cmml=(sum(frq$mids^kmml*frq$density)/sum(frq$density))^(1/kmml)
# NOTE(review): the grouping below differs from the commented textbook form
# above -- confirm which is intended.
kmml=sum(frq$mids^(kmv+0.09)*log(frq$mids)*frq$density)/((sum(frq$mids^(kmv+0.09)*frq$density))-
(sum(log(frq$mids)*frq$density)/sum(frq$density)))
cmml=((1/sum(frq$density))*sum(frq$mids^kmml*frq$density))^(1/kmml)
# Empirical Method of Lysen (EML).
keml=(sd/moy)^-1.086
ceml=moy/gamma(1+(1/keml))
ceml2=mean(v2)*(0.568+(0.433/keml))^(-(1/keml))
#__________________________________________
rbind(kmq,keml,kls,kmml,k,
cmq,ceml,cls,cmml,c)
# -3- Third Group of Weibull Distribution Estimators
# Equivalent Energy Method.
kee=1/(sum((frq$mids^(kmv+0.09))*log(frq$mids)*frq$density)/sum((frq$mids^(kmv+0.09))*frq$density)
-sum(log(frq$mids)*frq$density)/sum(frq$density))
#cee=((moy^3)/gamma(1+(3/kee)))^(1/3)
cee=((moy^3)/gamma(1+(3/kmm)))^(1/3)
# Probability-Weighted Moments Method (based on the power-density method).
# BUG FIX: wrapped comment fragments here had lost their "#" and were syntax
# errors; restored as comments.
#sort(x, decreasing = FALSE, na.last = TRUE)
#vi=sort(v2, decreasing = FALSE, na.last = TRUE)
# WARNING (orig. "ATTENTION A TRIER POUR EVITER L'ERREUR"): v2 must be
# sorted ascending before computing the weighted sum.
#Cbar = mean(v2)/((2/(n*(n-1))) * sum(vi*(n-i)))  -- sort first!
# NOTE(review): 1.05801E+11 is a precomputed sum(vi*(n-i)) for this data
# set; recompute it if v2 changes.
Cbar=moy/(
(2/(length(v2)*(length(v2)-1))) * 1.05801E+11 )
kpwm=log(2)/log(Cbar)
cpwm=(moy^3/gamma(1+(3/kpwm)))^(1/3)
#*====================* WAsP Method
# Match the CDF at the mean speed and the third moment.
# BUG FIXES: `F` renamed Fwasp (it masked base F/FALSE); the kwasp
# assignment had its name and "= ..." on separate lines (syntax error).
Fwasp=1-exp(-(mean(v2)/(cmm+0.05))^(kmm+0.05))
cwasp=(sum(v2^3)/(length(v2)*gamma((3/(kmm+0.05))+1)))^(1/3)
X=1-mean(Fwasp)
w1=-log(X)
#log(w1)==log((moy/cwasp)^k)
#log((moy/cwasp)^k)=k*log(moy)-log(cwasp)=k*log(moy/cwasp)
kwasp = log(w1)/log(moy/cwasp)
# Wrapped remnants of commented scratch work, restored as comments:
#z=1-F; w=-log(z);w
#x=(mean(v2)/((sum(v2^3))/(length(v2)*gamma((3/kmm)+1))^(1/3)))^2.61; x
#*====================* Weighted Least-Squares Method
# BUG FIX: `F` renamed Fwls (masked base F/FALSE).
Fwls=1-exp(-(v2/(cmm+0.09))^(kmm+0.09))
x = log(v2)
y =log(-log(1-Fwls))
# Standard WLS weights for the linearized CDF.
w=((1-Fwls)*log(1-Fwls))^2
library(MASS)
summary(rlm(y~x,weights
= 1/w))
# Transcribed from the rlm summary; re-run and update if the data change.
kwls= 2.780600e+00
Intercept= -6.752000e+00
cwls=exp(-(Intercept/kwls))
# Third-group estimates side by side.
rbind(kee,kpwm,kwasp,kwls,cee,cpwm,cwasp,cwls)
# OTHER POSSIBILITIES FOR FITTING THE WEIBULL
#temp <- dput(temp)
# Fitting methods: "mle", "mme", "qme", "mge", "mse"
# "mme" for 'moment matching estimation'
#mmedist(v2, "weibull", order, start = NULL, fix.arg = NULL, optim.method = "default")
library(fitdistrplus)
# Example: moment-matching ("mme") fit of a beta sample.
# BUG FIX: "x3" and "<- rbeta(...)" had been split onto separate lines,
# which is a syntax error in R; rejoined.
# NOTE(review): rbeta's first argument n is taken from v2 (one draw per
# element) -- confirm that is the intent.
x3 <- rbeta(v2,shape1=cmq, shape2=kmq)
mmedist(x3,
"beta")$estimate
# "mse" for 'maximum spacing estimation'
# NOTE(review): the call below actually uses method = "mle" -- confirm.
fitdist(v2,
distr = "weibull", method = "mle")
#------------ "mle" for 'maximum likelihood estimation'
mledist(v2,
distr="weibull", start = NULL, fix.arg = NULL, optim.method =
"default")$estimate
fitdist(v2,
distr = "weibull", method = "mle")
#------------- "qme" for 'quantile matching estimation'
qmedist(V_60,
"weibull", probs=c(1/3, 2/3))$estimate
#------------ "mge" for 'maximum goodness-of-fit estimation'
# gof options: "CvM", "KS", "AD", "ADR", "ADL", "AD2R", "AD2L"
mgedist(V_60,
distr="weibull", gof = "ADL", start = NULL, fix.arg = NULL,
optim.method = "default")
# The mgedist function numerically minimizes a goodness-of-fit distance
# selected by the `gof` argument.
# One may use one of the classical distances defined in Stephens (1986):
#   the Cramer-von Mises distance ("CvM")
#   the Kolmogorov-Smirnov distance ("KS")
#   the Anderson-Darling distance ("AD"), which gives more weight to the
#   tails of the distribution, or one of the variants of this last distance
#   proposed by Luceno (2006):
#   the right-tail AD ("ADR")
#   the left-tail AD ("ADL")
library(MASS)          # BUG FIX: library(), not require(), for hard deps
library(fitdistrplus)
# Fit a Weibull to the 60 m speeds by maximum likelihood and by maximum
# spacing, then compare goodness of fit.
# BUG FIX: both assignments had their names and "<- fitdist(...)" on
# separate lines -- a syntax error in R; rejoined. The wrapped commented
# examples below are restored as single-line comments.
fit.weibull <- fitdist(V_60, distr = "weibull", method = "mle",
lower = c(0, 0))
fit.weibull2 <- fitdist(V_60, distr = "weibull", method = "mse",
lower = c(0, 0))
#fit.gamma <- fitdist(temp, distr = "gamma", method = "mle", lower = c(0, 0), start = list(scale = 1, shape = 1))
#fit.weibull <- fitdist(temp, distr = "weibull", method = "mle", lower = c(0, 0))
#fit.gamma <- fitdist(temp, distr = "gamma", method = "mle", lower = c(0, 0), start = list(scale = 1, shape = 1))
plot(fit.weibull)
#plot(fit.gamma)
gofstat(list(fit.weibull,
fit.weibull2))
# ESTIMATION OTHER WEIBULL 2
#https://rdrr.io/cran/Temporal/man/fit.Weibull.html
#library(Temporal)
# Generate Weibull data with 20% censoring:
#data = genData(n=1e3,dist="weibull",theta=c(2,2),p=0.2);
# Estimate:
#fit = fitParaSurv(time=data$time,status=data$status,dist="weibull")
# BOOTSTRAP SIMULATION ESTIMATION
#Boodist
# CREATION OF RELATIVE FREQUENCIES OF WIND SPEED (m/s)
# Using Excel leads to replacement by a specific element, not by number:
#data$Variable[data$Variable<=100]<-1
#data$Variable[data$Variable %in% c("maison","appart","truc")]<-"habitat"
# Goubet station import (clipboard paste) and empirical speed frequencies.
goubet<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
str(goubet);
attach(goubet); names(goubet)
# Counts and relative frequencies per recorded speed value.
effectif<-table(V_60);
cbind(effectif)
frequence<-(effectif)/length(V_60);
cbind(frequence)
# data$V_60[data$V_60== ]<-
#
# GOBAAD STATION
# BUG FIX: "GOBAAD STATION" was a bare line (syntax error); now a comment.
#################
# GOBAAD: recode each recorded wind speed (m/s) in data$v2 to its observed
# relative frequency. Rewrites ~170 sequential scalar substitutions
# ("data$v2[data$v2==s]<-f") as one vectorized lookup; speeds not listed
# are left unchanged, exactly as before. Column 1 = speed, column 2 = freq.
gobaad_tab <- matrix(c(
0.4, 1.113932e-02,
0.5, 3.970328e-03,
0.6, 3.708959e-03,
0.7, 4.007667e-03,
0.8, 4.468175e-03,
0.9, 4.455729e-03,
1, 4.754437e-03,
1.1, 5.015807e-03,
1.2, 5.351853e-03,
1.3, 6.111070e-03,
1.4, 7.106763e-03,
1.5, 6.795609e-03,
1.6, 7.704179e-03,
1.7, 7.803749e-03,
1.8, 8.027780e-03,
1.9, 8.239365e-03,
2, 8.674981e-03,
2.2, 1.030543e-02,
2.3, 9.409305e-03,
2.4, 1.026809e-02,
2.5, 1.061658e-02,
2.6, 1.044234e-02,
2.7, 1.138825e-02,
2.8, 1.145048e-02,
2.9, 1.258308e-02,
3, 1.257063e-02,
3.1, 1.258308e-02,
3.2, 1.255819e-02,
3.3, 1.316805e-02,
3.4, 1.375302e-02,
3.5, 1.279466e-02,
3.6, 1.374057e-02,
3.7, 1.323028e-02,
3.8, 1.448734e-02,
3.9, 1.346676e-02,
4, 1.483584e-02,
4.1, 1.449979e-02,
4.2, 1.494785e-02,
4.3, 1.457447e-02,
4.4, 1.513454e-02,
4.5, 1.584397e-02,
4.6, 1.550793e-02,
4.7, 1.603067e-02,
4.8, 1.583153e-02,
4.9, 1.569462e-02,
5, 1.588131e-02,
5.1, 1.524656e-02,
5.2, 1.497274e-02,
5.3, 1.461180e-02,
5.4, 1.454957e-02,
5.5, 1.519677e-02,
5.6, 1.448734e-02,
5.7, 1.350409e-02,
5.8, 1.431310e-02,
5.9, 1.213502e-02,
6, 1.290668e-02,
6.1, 1.311826e-02,
6.2, 1.269509e-02,
6.3, 1.171185e-02,
6.4, 1.186120e-02,
6.5, 1.115177e-02,
6.6, 1.059169e-02,
6.7, 1.081572e-02,
6.8, 9.994275e-03,
6.9, 9.608443e-03,
7, 9.160381e-03,
7.1, 9.770244e-03,
7.2, 9.658228e-03,
7.3, 8.625196e-03,
7.4, 8.674981e-03,
7.5, 7.940657e-03,
7.6, 7.915764e-03,
7.7, 7.915764e-03,
7.8, 7.554825e-03,
7.9, 7.641949e-03,
8, 7.604610e-03,
8.1, 7.231225e-03,
8.2, 6.696040e-03,
8.3, 6.123516e-03,
8.4, 5.650561e-03,
8.5, 6.036393e-03,
8.6, 5.438976e-03,
8.7, 5.401638e-03,
8.8, 5.376746e-03,
8.9, 5.040699e-03,
9, 5.177607e-03,
9.1, 5.003360e-03,
9.2, 4.393498e-03,
9.3, 4.331267e-03,
9.4, 4.169467e-03,
9.5, 4.418390e-03,
9.6, 3.771190e-03,
9.7, 3.796082e-03,
9.8, 3.920544e-03,
9.9, 4.231698e-03,
10, 3.211112e-03,
10.1, 2.987081e-03,
10.2, 2.812834e-03,
10.3, 3.310681e-03,
10.4, 2.949742e-03,
10.5, 2.738157e-03,
10.6, 2.787942e-03,
10.7, 2.837727e-03,
10.8, 2.314988e-03,
10.9, 2.252757e-03,
11, 2.128295e-03,
11.1, 2.128295e-03,
11.2, 2.115849e-03,
11.3, 1.941603e-03,
11.4, 1.468648e-03,
11.5, 1.530879e-03,
11.6, 1.356633e-03,
11.7, 1.555771e-03,
11.8, 1.431310e-03,
11.9, 1.294402e-03,
12, 1.306848e-03,
12.1, 1.095263e-03,
12.2, 1.169940e-03,
12.3, 1.232171e-03,
12.4, 9.832475e-04,
12.5, 1.157494e-03,
12.6, 9.334628e-04,
12.7, 8.961243e-04,
12.8, 7.716626e-04,
12.9, 1.033032e-03,
13, 8.090011e-04,
13.1, 6.596470e-04,
13.2, 6.223085e-04,
13.3, 6.596470e-04,
13.4, 6.720932e-04,
13.5, 5.600777e-04,
13.6, 4.729545e-04,
13.7, 5.102930e-04,
13.8, 2.862619e-04,
13.9, 2.613696e-04,
14, 4.107236e-04,
14.1, 2.738157e-04,
14.2, 2.364772e-04,
14.3, 2.115849e-04,
14.4, 1.618002e-04,
14.5, 1.244617e-04,
14.6, 9.956936e-05,
14.7, 6.223085e-05,
14.8, 8.712319e-05,
14.9, 7.467702e-05,
15, 4.978468e-05,
15.1, 1.369079e-04,
15.2, 6.223085e-05,
15.3, 4.978468e-05,
15.4, 3.733851e-05,
15.5, 4.978468e-05,
15.6, 6.223085e-05,
15.7, 2.489234e-05,
15.8, 1.244617e-05,
15.9, 2.489234e-05,
16, 3.733851e-05,
16.1, 2.489234e-05,
16.2, 4.978468e-05,
16.3, 1.244617e-05,
16.4, 3.733851e-05,
16.5, 2.489234e-05,
16.6, 1.244617e-05,
16.7, 2.489234e-05,
16.8, 1.244617e-05,
17.3, 1.244617e-05,
17.5, 2.489234e-05,
17.9, 1.244617e-05,
18.1, 1.244617e-05,
18.9, 1.244617e-05,
20.3, 1.244617e-05,
21.5, 1.244617e-05
), ncol=2, byrow=TRUE)
# Exact-equality lookup, same semantics as the original "==" tests.
gobaad_idx <- match(data$v2, gobaad_tab[, 1])
data$v2[!is.na(gobaad_idx)] <- gobaad_tab[gobaad_idx[!is.na(gobaad_idx)], 2]
# Export the recoded Gobaad frequencies, then re-import working data.
data=data.frame(data$v2)
write.table(data,
"exportR.txt")
getwd() # setwd()
data<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
str(data);
str(data)
#
# GOUBBET STATION
# BUG FIX: "GOUBBET STATION" was a bare line (syntax error); now a comment.
# GOUBET: recode each recorded wind speed (m/s) in goubet$F_60 to its
# observed relative frequency -- same vectorized rewrite as the Gobaad
# table above. Speeds not listed are left unchanged, exactly as before.
goubet_tab <- matrix(c(
0.4, 3.986314e-03,
0.5, 1.151360e-03,
0.6, 1.031880e-03,
0.7, 9.015370e-04,
0.8, 1.042742e-03,
0.9, 1.238256e-03,
1, 1.118775e-03,
1.1, 1.238256e-03,
1.2, 1.292565e-03,
1.3, 1.422908e-03,
1.4, 1.336013e-03,
1.5, 1.227394e-03,
1.6, 1.629284e-03,
1.7, 1.813936e-03,
1.8, 1.857383e-03,
1.9, 1.911693e-03,
2, 2.139793e-03,
2.1, 2.270135e-03,
2.2, 2.563406e-03,
2.3, 2.204964e-03,
2.4, 2.737197e-03,
2.5, 2.519959e-03,
2.6, 2.856677e-03,
2.7, 3.356324e-03,
2.8, 3.215120e-03,
2.9, 3.269429e-03,
3, 3.660457e-03,
3.1, 3.660457e-03,
3.2, 3.584424e-03,
3.3, 4.257861e-03,
3.4, 4.018900e-03,
3.5, 4.399066e-03,
3.6, 4.757508e-03,
3.7, 4.638027e-03,
3.8, 4.974746e-03,
3.9, 5.029056e-03,
4, 5.354912e-03,
4.1, 5.344050e-03,
4.2, 5.550426e-03,
4.3, 5.626460e-03,
4.4, 5.865421e-03,
4.5, 6.202140e-03,
4.6, 6.386792e-03,
4.7, 6.788682e-03,
4.8, 6.734372e-03,
4.9, 6.289035e-03,
5, 6.636615e-03,
5.1, 6.832129e-03,
5.2, 7.223158e-03,
5.3, 7.559876e-03,
5.4, 7.299191e-03,
5.5, 6.908163e-03,
5.6, 7.581600e-03,
5.7, 7.896595e-03,
5.8, 7.559876e-03,
5.9, 7.299191e-03,
6, 7.092815e-03,
6.1, 7.679357e-03,
6.2, 7.244881e-03,
6.3, 7.549014e-03,
6.4, 7.777114e-03,
6.5, 7.440395e-03,
6.6, 7.016782e-03,
6.7, 7.603324e-03,
6.8, 7.092815e-03,
6.9, 7.635909e-03,
7, 7.494705e-03,
7.1, 7.885733e-03,
7.2, 8.613480e-03,
7.3, 8.113833e-03,
7.4, 8.146418e-03,
7.5, 8.570032e-03,
7.6, 7.635909e-03,
7.7, 7.896595e-03,
7.8, 8.515723e-03,
7.9, 7.907457e-03,
8, 8.309347e-03,
8.1, 8.689513e-03,
8.2, 8.895889e-03,
8.3, 8.906751e-03,
8.4, 9.362950e-03,
8.5, 9.362950e-03,
8.6, 9.688807e-03,
8.7, 8.819856e-03,
8.8, 9.764840e-03,
8.9, 9.895183e-03,
9, 9.992940e-03,
9.1, 9.395536e-03,
9.2, 1.006897e-02,
9.3, 9.558464e-03,
9.4, 1.026449e-02,
9.5, 9.656221e-03,
9.6, 9.938630e-03,
9.7, 9.286917e-03,
9.8, 9.721392e-03,
9.9, 9.873459e-03,
10, 8.863303e-03,
10.1, 9.906045e-03,
10.2, 9.916907e-03,
10.3, 9.286917e-03,
10.4, 9.960354e-03,
10.5, 9.058817e-03,
10.6, 1.010156e-02,
10.7, 8.678651e-03,
10.8, 8.722099e-03,
10.9, 8.874165e-03,
11, 8.287623e-03,
11.1, 8.950198e-03,
11.2, 7.896595e-03,
11.3, 9.004508e-03,
11.4, 7.820562e-03,
11.5, 8.287623e-03,
11.6, 8.450551e-03,
11.7, 8.852441e-03,
11.8, 9.352088e-03,
11.9, 8.982784e-03,
12, 8.950198e-03,
12.1, 8.841579e-03,
12.2, 8.787270e-03,
12.3, 8.472275e-03,
12.4, 9.167436e-03,
12.5, 8.374518e-03,
12.6, 8.689513e-03,
12.7, 8.765546e-03,
12.8, 8.472275e-03,
12.9, 7.896595e-03,
13, 8.222452e-03,
13.1, 8.070385e-03,
13.2, 6.984196e-03,
13.3, 6.451963e-03,
13.4, 7.494705e-03,
13.5, 6.669201e-03,
13.6, 6.126107e-03,
13.7, 6.527997e-03,
13.8, 6.245587e-03,
13.9, 5.832835e-03,
14, 5.724217e-03,
14.1, 5.702493e-03,
14.2, 5.191984e-03,
14.3, 5.311465e-03,
14.4, 4.388204e-03,
14.5, 4.942160e-03,
14.6, 4.018900e-03,
14.7, 4.008038e-03,
14.8, 3.573562e-03,
14.9, 3.486667e-03,
15, 3.204258e-03,
15.1, 3.258567e-03,
15.2, 3.269429e-03,
15.3, 3.139087e-03,
15.4, 2.987020e-03,
15.5, 2.031174e-03,
15.6, 2.672025e-03,
15.7, 2.204964e-03,
15.8, 1.966002e-03,
15.9, 1.955140e-03,
16, 1.748764e-03,
16.1, 1.803074e-03,
16.2, 1.651007e-03,
16.3, 1.303427e-03,
16.4, 1.292565e-03,
16.5, 1.422908e-03,
16.6, 1.118775e-03,
16.7, 1.183946e-03,
16.8, 9.341226e-04,
16.9, 9.341226e-04,
17, 7.494705e-04,
17.1, 6.951610e-04,
17.2, 5.756802e-04,
17.3, 6.734372e-04,
17.4, 4.887851e-04,
17.5, 4.236138e-04,
17.6, 3.258567e-04,
17.7, 3.149948e-04,
17.8, 4.236138e-04,
17.9, 2.498235e-04,
18, 3.149948e-04,
18.1, 3.041329e-04,
18.2, 2.715473e-04,
18.3, 2.824092e-04,
18.4, 1.086189e-04,
18.5, 1.520665e-04,
18.6, 2.063759e-04,
18.7, 2.280997e-04,
18.8, 2.172378e-04,
18.9, 1.955140e-04,
19, 1.520665e-04,
19.1, 1.086189e-04,
19.2, 5.430946e-05,
19.3, 5.430946e-05,
19.4, 3.258567e-05,
19.5, 2.172378e-05,
19.9, 1.086189e-05,
20.3, 1.086189e-05,
20.8, 1.086189e-05
), ncol=2, byrow=TRUE)
# Exact-equality lookup, same semantics as the original "==" tests.
goubet_idx <- match(goubet$F_60, goubet_tab[, 1])
goubet$F_60[!is.na(goubet_idx)] <- goubet_tab[goubet_idx[!is.na(goubet_idx)], 2]
# Export the recoded Goubet frequencies, then re-import a table as `r`.
# NOTE(review): `data` is overwritten here -- confirm nothing downstream
# still expects the previous contents.
data=data.frame(goubet$F_60)
write.table(data,
"exportR.txt")
getwd() # setwd()
r<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
str(r)
#
# NEW QUICK WAY
# Mean wind power density from the moment-method Weibull fit (W/m^2).
POUT=0.5*1.225*(cmm^3)*gamma((kmm+3)/kmm);
POUT
y=hist(v2,
freq=TRUE) #breaks=60
# Estimators computed above:
# kmm kmv kpd kcf kmq keml kls kmml kee kpwm kwasp kwls
# BUG FIX: the estimator list above had wrapped onto bare identifier rows
# (syntax errors); restored as comments.
#weibullmm=(kmm/cmm)*((v2/cmm)^(kmm-1))*exp(-((v2/cmm)^kmm)) #Long Way
hist(v2,breaks
= "Sturges", freq=F)
summary(v2)
cbind(y$density)
cbind(y$mids)
cbind(y$counts)
#cbind(y$breaks)
###
# Fitted Weibull pdfs at the histogram mids for each estimator ("short way").
weibullj=(kmm/cmm)*((y$mids/cmm)^(kmm-1))*exp(-((y$mids/cmm)^kmm)) #Short Way
cbind(weibullj)
weibullmv=(kmv/cmv)*((y$mids/cmv)^(kmv-1))*exp(-((y$mids/cmv)^kmv))
weibullwasp=(kwasp/cwasp)*((y$mids/cwasp)^(kwasp-1))*exp(-((y$mids/cwasp)^kwasp))
weibullmmn=(k/c)*((y$mids/c)^(k-1))*exp(-((y$mids/c)^k))
###
weibullp=(kpd/cpd)*((y$mids/cpd)^(kpd-1))*exp(-((y$mids/cpd)^kpd))
weibullf=(kcf/ccf)*((y$mids/ccf)^(kcf-1))*exp(-((y$mids/ccf)^kcf))
weibullq=(kmq/cmq)*((y$mids/cmq)^(kmq-1))*exp(-((y$mids/cmq)^kmq))
weibulll=(keml/ceml)*((y$mids/ceml)^(keml-1))*exp(-((y$mids/ceml)^keml))
weibulls=(kls/cls)*((y$mids/cls)^(kls-1))*exp(-((y$mids/cls)^kls))
weibullmm=(kmml/cmml)*((y$mids/cmml)^(kmml-1))*exp(-((y$mids/cmml)^kmml))
weibulle=(kee/cee)*((y$mids/cee)^(kee-1))*exp(-((y$mids/cee)^kee))
weibullw=(kpwm/cpwm)*((y$mids/cpwm)^(kpwm-1))*exp(-((y$mids/cpwm)^kpwm))
weibullwa=(kwasp/cwasp)*((y$mids/cwasp)^(kwasp-1))*exp(-((y$mids/cwasp)^kwasp))
weibullwl=(kwls/cwls)*((y$mids/cwls)^(kwls-1))*exp(-((y$mids/cwls)^kwls))
# Corresponding fitted CDFs at the mids.
FCm=1-exp(-((y$mids/cmm)^kmm))
FCv=1-exp(-((y$mids/cmv)^kmv))
FCwa=1-exp(-((y$mids/cwasp)^kwasp))
FCmm=1-exp(-((y$mids/c)^k))
# BUG FIX: `weibullm`/`weibullv` are never defined in this file; the vectors
# computed above are `weibullj` and `weibullmv`.
cbind(y$mids,y$density,weibullj,weibullmv,weibullwa,weibullmm,
FCm, FCv,FCwa,FCmm)
FCp=1-exp(-((y$mids/cpd)^kpd))
FCf=1-exp(-((y$mids/ccf)^kcf))
# NOTE(review): weibullmse/Fmse are defined only in the section below; run
# that section first for this comparison to work.
cbind(y$mids,y$density,weibullj,weibullmv,weibullp,weibullf,
weibullmse, FCm, FCv,FCp,FCf,Fmse)
# Weibull "MSE": averaged parameters across the first-group estimators.
# BUG FIXES: mean(a,b,...) only averages its FIRST argument in R -- the
# values must be wrapped in c(); `kccf` was a typo for `ccf`; the MSE
# assignment had its name and "= ..." on separate lines (syntax error); and
# the MSE squared the sum of residuals instead of summing squared residuals.
k=mean(c(kmm, kmv, kpd, kcf))+0.15; k
c=mean(c(cmm, cmv, cpd, ccf))+0.19; c
weibullmse=(k/c)*((y$mids/c)^(k-1))*exp(-((y$mids/c)^k))
Fmse=1-exp(-((y$mids/c)^k))
#1.810703e-07
MSE = sum((weibullmse-y$density)^2)/length(weibullmse); MSE
rmse=sqrt(sum((y$density-weibullmse)^2)/length(y$density));
rmse
R2=
1-(sum((y$density-weibullmse)^2)/sum((y$density-mean(y$density) )^2)); R2
# AUTOMATIC (above): default-function fits.
# Weibull Hybrid: use when the share of v=0 observations is >= 15%.
# ffo=fvo
# weibull= (1-ffo)*(kmm/cmm)*((y$mids/cmm)^(kmm-1))*exp(-((y$mids/cmm)^kmm))
#
# GENERAL RESULT
rbind(kmm,kmv,kpd,kcf,cmm,cmv,cpd,ccf)
rbind(kmq,keml,kls,kmml,k,
cmq,ceml,cls,cmml,c)
rbind(kee,kpwm,kwasp,kwls,cee,cpwm,cwasp,cwls)
# NOTE(review): weibullej is defined only further below -- run that section
# first for `weib` to exist; RMSE is never defined (only RMSEej etc.).
weib=weibullej
R2=
1-(sum((y$density-weib)^2)/sum((y$density-mean(y$density) )^2))
MAE=sum(abs((y$density-weib)/1))/length(weib)
round(RMSE,5);
round(R2,5);round(MAE,5)
#weibullej weibullmm weibullmv weibullwasp weibullmq weibulleml weibullpd
rbind(kej,kmv,kpd,keml,kwasp,kmq,kmm,
cmm, cmq, cwasp, cej,cmv,cpd,ceml2)
y=hist(v2,
prob=F); dev.off() # breaks=85
cbind(
y$mids,y$density)
# Use the Lysen alternative scale estimate from here on.
ceml=ceml2
#weibullej weibullmm weibullmv weibullwasp weibullmq weibulleml weibullpd
# Fitted Weibull pdfs at the histogram mids per estimator, using
# stats::dweibull instead of the hand-rolled formula (identical math:
# dweibull(x, k, c) = (k/c)*(x/c)^(k-1)*exp(-(x/c)^k)).
weibullej=dweibull(y$mids, shape=kej, scale=cej)
weibullmm=dweibull(y$mids, shape=kmm, scale=cmm)
weibullmv=dweibull(y$mids, shape=kmv, scale=cmv)
weibullwasp=dweibull(y$mids, shape=kwasp, scale=cwasp)
weibullmq=dweibull(y$mids, shape=kmq, scale=cmq)
weibulleml=dweibull(y$mids, shape=keml, scale=ceml)
weibullpd=dweibull(y$mids, shape=kpd, scale=cpd)
cbind(
y$mids,y$density,weibullej,weibullmm,weibullmv,weibullwasp,weibullmq,weibulleml,weibullpd)
# Corresponding CDFs via stats::pweibull (= 1 - exp(-(x/c)^k)).
FCej=pweibull(y$mids, shape=kej, scale=cej)
FCmm=pweibull(y$mids, shape=kmm, scale=cmm)
FCmv=pweibull(y$mids, shape=kmv, scale=cmv)
FCwasp=pweibull(y$mids, shape=kwasp, scale=cwasp)
FCmq=pweibull(y$mids, shape=kmq, scale=cmq)
FCeml=pweibull(y$mids, shape=keml, scale=ceml)
FCpd=pweibull(y$mids, shape=kpd, scale=cpd)
cbind(
y$mids,y$density,FCej,FCmm,FCmv,FCwasp,FCmq,FCeml,FCpd)
# Parameter summaries.
rbind(kej,kmv,kpd,keml,kwasp,kmq,kmm,
cmm, cmq, cwasp, cej,cmv,cpd,ceml2)
cbind(round(c(kej,kmv,kpd,keml,kwasp,kmq,kmm,
cmm, cmq, cwasp, cej,cmv,cpd,ceml2),3))
# RMSE and R^2 of each named estimator against the empirical density.
# BUG FIX: the estimator list had wrapped onto a bare identifier row
# (syntax error); restored as a comment.
#weibullej weibullmm weibullmv weibullwasp weibullmq weibulleml weibullpd
RMSEej=sqrt(sum((y$density-weibullej)^2)/length(y$density))
RMSEmm=sqrt(sum((y$density-weibullmm)^2)/length(y$density))
RMSEmv=sqrt(sum((y$density-weibullmv)^2)/length(y$density))
RMSEwasp=sqrt(sum((y$density-weibullwasp)^2)/length(y$density))
RMSEmq=sqrt(sum((y$density-weibullmq)^2)/length(y$density))
RMSEeml=sqrt(sum((y$density-weibulleml)^2)/length(y$density))
RMSEpd=sqrt(sum((y$density-weibullpd)^2)/length(y$density))
round(RMSEpd,5 )
R2ej=
1-(sum((y$density-weibullej)^2)/sum((y$density-mean(y$density) )^2))
R2mm=
1-(sum((y$density-weibullmm)^2)/sum((y$density-mean(y$density) )^2))
R2mv=
1-(sum((y$density-weibullmv)^2)/sum((y$density-mean(y$density) )^2))
R2wasp=
1-(sum((y$density-weibullwasp)^2)/sum((y$density-mean(y$density) )^2))
R2mq=
1-(sum((y$density-weibullmq)^2)/sum((y$density-mean(y$density) )^2))
R2eml=
1-(sum((y$density-weibulleml)^2)/sum((y$density-mean(y$density) )^2))
R2pd=
1-(sum((y$density-weibullpd)^2)/sum((y$density-mean(y$density) )^2))
round(R2pd,5 )
# RMSE of each fitted pdf (numbered 1..12) against the empirical density.
# BUG FIX: `weibullm`/`weibullv` are never defined; the corresponding
# vectors are `weibullj` (moment method) and `weibullmv` (max. likelihood).
rmse1=sqrt(sum((y$density-weibullj)^2)/length(y$density))
rmse2=sqrt(sum((y$density-weibullmv)^2)/length(y$density))
rmse3=sqrt(sum((y$density-weibullp)^2)/length(y$density))
rmse4=sqrt(sum((y$density-weibullf)^2)/length(y$density))
rmse5=sqrt(sum((y$density-weibullq)^2)/length(y$density))
rmse6=sqrt(sum((y$density-weibulll)^2)/length(y$density))
rmse7=sqrt(sum((y$density-weibulls)^2)/length(y$density))
rmse8=sqrt(sum((y$density-weibullmm)^2)/length(y$density))
rmse9=sqrt(sum((y$density-weibulle)^2)/length(y$density))
rmse10=sqrt(sum((y$density-weibullw)^2)/length(y$density))
rmse11=sqrt(sum((y$density-weibullwa)^2)/length(y$density))
rmse12=sqrt(sum((y$density-weibullwl)^2)/length(y$density))
rbind(rmse1,rmse2,rmse3,rmse4,rmse5,rmse6,rmse7,rmse8,rmse9,rmse10,rmse11,rmse12)
# Matching coefficients of determination.
R2m1=
1-(sum((y$density-weibullj)^2)/sum((y$density-mean(y$density) )^2))
R2m2=
1-(sum((y$density-weibullmv)^2)/sum((y$density-mean(y$density) )^2))
R2m3=
1-(sum((y$density-weibullp)^2)/sum((y$density-mean(y$density) )^2))
R2m4=
1-(sum((y$density-weibullf)^2)/sum((y$density-mean(y$density) )^2))
R2m5=
1-(sum((y$density-weibullq)^2)/sum((y$density-mean(y$density) )^2))
R2m6=
1-(sum((y$density-weibulll)^2)/sum((y$density-mean(y$density) )^2))
R2m7=
1-(sum((y$density-weibulls)^2)/sum((y$density-mean(y$density) )^2))
R2m8=
1-(sum((y$density-weibullmm)^2)/sum((y$density-mean(y$density) )^2))
R2m9=
1-(sum((y$density-weibulle)^2)/sum((y$density-mean(y$density) )^2))
R2m10=
1-(sum((y$density-weibullw)^2)/sum((y$density-mean(y$density) )^2))
R2m11=
1-(sum((y$density-weibullwa)^2)/sum((y$density-mean(y$density) )^2))
R2m12=
1-(sum((y$density-weibullwl)^2)/sum((y$density-mean(y$density) )^2))
rbind(R2m1,R2m2,R2m3,R2m4,R2m5,R2m6,R2m7,R2m8,R2m9,R2m10,R2m11,R2m12)
# Overlay fitted Weibull curves on the empirical density barplot.
barplot(y$density~y$mids,
col="blue3", xlab="", ylab="")
# BUG FIXES: `weibullm` is undefined (use weibullj); `xlan` was a typo for
# `xlab`; `add=T` is not a plot() argument (par(new=T) already overlays).
par(new=T);
plot(weibullj~y$mids, type="b", pch=19, col="red",
axes=FALSE, xlab=NA, ylab=NA)
par(new=T);
plot(weibullmse~y$mids, type="b", pch=20, col="green",
axes=FALSE, xlab=NA, ylab=NA)
#
# -4- Fitted Weibull Distribution (evaluated on the full series v2)
#s=dweibull(V_60, shape=kmm, scale = cmm)
weibullnm=(k/c)*((v2/c)^(k-1))*exp(-((v2/c)^k))
weibullmm=(kmm/cmm)*((v2/cmm)^(kmm-1))*exp(-((v2/cmm)^kmm))
weibullmv=(kmv/cmv)*(v2/cmv)^(kmv-1)*exp(-((v2/cmv)^kmv))
weibullpd=(kpd/cpd)*(v2/cpd)^(kpd-1)*exp(-((v2/cpd)^kpd))
weibullcf=(kcf/ccf)*(v2/ccf)^(kcf-1)*exp(-((v2/ccf)^kcf))
weibullkmq=(kmq/cmq)*(v2/cmq)^(kmq-1)*exp(-((v2/cmq)^kmq))
weibulleml=(keml/ceml)*(v2/ceml)^(keml-1)*exp(-((v2/ceml)^keml))
weibullls=(kls/cls)*(v2/cls)^(kls-1)*exp(-((v2/cls)^kls))
weibullmml=(kmml/cmml)*(v2/cmml)^(kmml-1)*exp(-((v2/cmml)^kmml))
weibullee=(kee/cee)*(v2/cee)^(kee-1)*exp(-((v2/cee)^kee))
weibullpwm=(kpwm/cpwm)*(v2/cpwm)^(kpwm-1)*exp(-((v2/cpwm)^kpwm))
weibullwasp=(kwasp/cwasp)*(v2/cwasp)^(kwasp-1)*exp(-((v2/cwasp)^kwasp))
weibullwls=(kwls/cwls)*(v2/cwls)^(kwls-1)*exp(-((v2/cwls)^kwls))
#data=data.frame(weibullmm,weibullmv,weibullpd,weibullcf,weibullkmq,weibulleml)
#write.table(data,"exportR.txt")
FCmm=1-exp(-((v2/cmm)^kmm))
FCmv=1-exp(-((v2/cmv)^kmv))
FCpd=1-exp(-((v2/cpd)^kpd))
FCcf=1-exp(-((v2/ccf)^kcf))
FCkmq=1-exp(-((v2/cmq)^kmq))
FCeml=1-exp(-((v2/ceml)^keml))
FCls=1-exp(-((v2/cls)^kls))
FCmml=1-exp(-((v2/cmml)^kmml))
FCee=1-exp(-((v2/cee)^kee))
FCpwm=1-exp(-((v2/cpwm)^kpwm))
FCwasp=1-exp(-((v2/cwasp)^kwasp))
FCwls=1-exp(-((v2/cwls)^kwls))
#data=data.frame(FCmm,FCmv,FCpd,FCcf,FCkmq,FCeml)
#write.table(data,"exportRFC.txt")
# QUALITY / MODEL FIT OF DISTRIBUTION
# (The wrapped comment continuation lines were syntax errors; re-commented.)
# hist() is called only to obtain binned densities; dev.off() closes the
# plot device each time.
# NOTE(review): weibullmmn and weibullj are not defined in this part of the
# file (only weibullnm / weibullmm are) -- confirm the intended objects.
a=hist(weibullmmn); dev.off()
b=hist(weibullmv); dev.off()
c=hist(weibullj); dev.off()
d=hist(weibullwasp);
dev.off()
y=hist(v2, prob=T)
Fq=y$density
w=weibullj
mbemm=sum(Fq-w)/length(Fq);mbemm
# NOTE(review): moy is assumed to be the mean observed value defined earlier
# in the file -- confirm.
R2mm=1-sum((Fq-w)^2)/sum((Fq-moy)^2);
R2mm
# FREQUENCY OF WIND SPEED
names(r)
Fq=data$F_40
# COEFFICIENT OF DETERMINATION
# a = total sum of squares; b<tag> = residual sum of squares per method.
# NOTE(review): the reference value here is mean(v2) (mean wind speed), not
# mean(Fq) -- confirm this is intentional.
a=sum((Fq-mean(v2))^2)
bmm=sum((Fq-weibullmm)^2)
bmv=sum((Fq-weibullmv)^2)
bpd=sum((Fq-weibullpd)^2)
bcf=sum((Fq-weibullcf)^2)
bkmq=sum((Fq-weibullkmq)^2)
beml=sum((Fq-weibulleml)^2)
bls=sum((Fq-weibullls)^2)
bmml=sum((Fq-weibullmml)^2)
bee=sum((Fq-weibullee)^2)
bpwm=sum((Fq-weibullpwm)^2)
bwasp=sum((Fq-weibullwasp)^2)
bwls=sum((Fq-weibullwls)^2)
coefdetmm=(a-bmm)/a
coefdetmv=(a-bmv)/a
coefdetpd=(a-bpd)/a
coefdetcf=(a-bcf)/a
coefdetkmq=(a-bkmq)/a
coefdeteml=(a-beml)/a
coefdetls=(a-bls)/a
coefdetmml=(a-bmml)/a
coefdetee=(a-bee)/a
coefdetpwm=(a-bpwm)/a
coefdetwasp=(a-bwasp)/a
coefdetwls=(a-bwls)/a
# R^2 with moy (mean observed value) as the reference level.
R2mm=1-sum((Fq-weibullmm)^2)/sum((Fq-moy)^2)
R2mv=1-sum((Fq-weibullmv)^2)/sum((Fq-moy)^2)
R2pd=1-sum((Fq-weibullpd)^2)/sum((Fq-moy)^2)
R2cf=1-sum((Fq-weibullcf)^2)/sum((Fq-moy)^2)
R2kmq=1-sum((Fq-weibullkmq)^2)/sum((Fq-moy)^2)
R2eml=1-sum((Fq-weibulleml)^2)/sum((Fq-moy)^2)
R2ls=1-sum((Fq-weibullls)^2)/sum((Fq-moy)^2)
R2mml=1-sum((Fq-weibullmml)^2)/sum((Fq-moy)^2)
R2ee=1-sum((Fq-weibullee)^2)/sum((Fq-moy)^2)
R2pwm=1-sum((Fq-weibullpwm)^2)/sum((Fq-moy)^2)
R2wasp=1-sum((Fq-weibullwasp)^2)/sum((Fq-moy)^2)
R2wls=1-sum((Fq-weibullwls)^2)/sum((Fq-moy)^2)
# ROOT MEAN SQUARE ERROR / DEVIATION (RMSD = RMSE)
# NOTE(review): this first rmsemm (against F_40) is immediately overwritten by
# the Fq version below; kept for fidelity with the original script.
rmsemm=sqrt(sum((weibullmm-F_40)^2)/length(F_40))
rmsemm=sqrt(sum((weibullmm-Fq)^2)/length(Fq))
rmsemv=sqrt(sum((weibullmv-Fq)^2)/length(Fq))
rmsepd=sqrt(sum((Fq-weibullpd)^2)/length(Fq))
rmsecf=sqrt(sum((Fq-weibullcf)^2)/length(Fq))
rmsekmq=sqrt(sum((Fq-weibullkmq)^2)/length(Fq))
rmseeml=sqrt(sum((Fq-weibulleml)^2)/length(Fq))
rmsels=sqrt(sum((Fq-weibullls)^2)/length(Fq))
rmsemml=sqrt(sum((Fq-weibullmml)^2)/length(Fq))
rmseee=sqrt(sum((Fq-weibullee)^2)/length(Fq))
rmsepwm=sqrt(sum((Fq-weibullpwm)^2)/length(Fq))
rmsewasp=sqrt(sum((Fq-weibullwasp)^2)/length(Fq))
rmsewls=sqrt(sum((Fq-weibullwls)^2)/length(Fq))
# NRMSD = RMSD / moy (mean observed value)
NRMDmm=rmsemm/moy
NRMDmv=rmsemv/moy
NRMDpd=rmsepd/moy
NRMDcf=rmsecf/moy
NRMDkmq=rmsekmq/moy  # was split across two lines (a syntax error); rejoined
NRMDeml=rmseeml/moy
NRMDls=rmsels/moy    # added for consistency (was missing)
NRMDmml=rmsemml/moy  # added for consistency (was missing)
NRMDee=rmseee/moy
NRMDpwm=rmsepwm/moy
NRMDwasp=rmsewasp/moy
NRMDwls=rmsewls/moy
# MEAN BIAS ERROR
mbemm=sum(Fq-weibullmm)/length(Fq)
mbemv=sum(Fq-weibullmv)/length(Fq)
mbepd=sum(Fq-weibullpd)/length(Fq)
mbecf=sum(Fq-weibullcf)/length(Fq)
mbekmq=sum(Fq-weibullkmq)/length(Fq)
mbeeml=sum(Fq-weibulleml)/length(Fq)
mbels=sum(Fq-weibullls)/length(Fq)
mbemml=sum(Fq-weibullmml)/length(Fq)
mbeee=sum(Fq-weibullee)/length(Fq)
mbepwm=sum(Fq-weibullpwm)/length(Fq)
mbewasp=sum(Fq-weibullwasp)/length(Fq)  # was sum(F-...): F is FALSE, not Fq
mbewls=sum(Fq-weibullwls)/length(Fq)
# MEAN ABSOLUTE ERROR = MEAN ABSOLUTE BIAS ERROR
maemm=sum(abs(weibullmm-Fq))/length(Fq)
maemv=sum(abs(weibullmv-Fq))/length(Fq)
maepd=sum(abs(weibullpd-Fq))/length(Fq)
maecf=sum(abs(weibullcf-Fq))/length(Fq)
maekmq=sum(abs(weibullkmq-Fq))/length(Fq)
maeeml=sum(abs(weibulleml-Fq))/length(Fq)
maels=sum(abs(weibullls-Fq))/length(Fq)
maemml=sum(abs(weibullmml-Fq))/length(Fq)
maeee=sum(abs(weibullee-Fq))/length(Fq)
maepwm=sum(abs(weibullpwm-Fq))/length(Fq)
maewasp=sum(abs(weibullwasp-Fq))/length(Fq)
maewls=sum(abs(weibullwls-Fq))/length(Fq)
# COEFFICIENT OF EFFICIENCY
# NOTE(review): the reference value is mean(v2) (mean wind speed), not
# mean(Fq) -- confirm against the intended definition.
coemm=sum((weibullmm-mean(v2))^2)/sum((Fq-mean(v2))^2)
coemv=sum((weibullmv-mean(v2))^2)/sum((Fq-mean(v2))^2)
coepd=sum((weibullpd-mean(v2))^2)/sum((Fq-mean(v2))^2)
coecf=sum((weibullcf-mean(v2))^2)/sum((Fq-mean(v2))^2)
coekmq=sum((weibullkmq-mean(v2))^2)/sum((Fq-mean(v2))^2)
coeeml=sum((weibulleml-mean(v2))^2)/sum((Fq-mean(v2))^2)
coels=sum((weibullls-mean(v2))^2)/sum((Fq-mean(v2))^2)
coemml=sum((weibullmml-mean(v2))^2)/sum((Fq-mean(v2))^2)
coeee=sum((weibullee-mean(v2))^2)/sum((Fq-mean(v2))^2)
coepwm=sum((weibullpwm-mean(v2))^2)/sum((Fq-mean(v2))^2)
coewasp=sum((weibullwasp-mean(v2))^2)/sum((Fq-mean(v2))^2)
coewls=sum((weibullwls-mean(v2))^2)/sum((Fq-mean(v2))^2)
# MEAN ABSOLUTE PERCENTAGE ERROR = MEAN ABSOLUTE RELATIVE ERROR (MAPE = MARE)
mapemm=sum(abs((weibullmm-Fq)/Fq))/length(Fq)
mapemv=sum(abs((weibullmv-Fq)/Fq))/length(Fq)
mapepd=sum(abs((weibullpd-Fq)/Fq))/length(Fq)
mapecf=sum(abs((weibullcf-Fq)/Fq))/length(Fq)
mapekmq=sum(abs((weibullkmq-Fq)/Fq))/length(Fq)
mapeeml=sum(abs((weibulleml-Fq)/Fq))/length(Fq)
mapels=sum(abs((weibullls-Fq)/Fq))/length(Fq)
mapemml=sum(abs((weibullmml-Fq)/Fq))/length(Fq)
mapeee=sum(abs((weibullee-Fq)/Fq))/length(Fq)
mapepwm=sum(abs((weibullpwm-Fq)/Fq))/length(Fq)
mapewasp=sum(abs((weibullwasp-Fq)/Fq))/length(Fq)
mapewls=sum(abs((weibullwls-Fq)/Fq))/length(Fq)
# IA INDEX (index of agreement)
# Higher values manifest better agreement between the distribution and the
# observations.  The "name / = value" split assignments were syntax errors in
# the original and have been rejoined; the exact duplicate IAee assignment
# was removed.
IAmm=sum(abs(weibullmm-Fq))/(sum(abs(weibullmm-mean(Fq))+abs(Fq-mean(Fq))))
IAmv=sum(abs(weibullmv-Fq))/(sum(abs(weibullmv-mean(Fq))+abs(Fq-mean(Fq))))
IApd=sum(abs(weibullpd-Fq))/(sum(abs(weibullpd-mean(Fq))+abs(Fq-mean(Fq))))
IAcf=sum(abs(weibullcf-Fq))/(sum(abs(weibullcf-mean(Fq))+abs(Fq-mean(Fq))))
IAkmq=sum(abs(weibullkmq-Fq))/(sum(abs(weibullkmq-mean(Fq))+abs(Fq-mean(Fq))))
IAeml=sum(abs(weibulleml-Fq))/(sum(abs(weibulleml-mean(Fq))+abs(Fq-mean(Fq))))
IAls=sum(abs(weibullls-Fq))/(sum(abs(weibullls-mean(Fq))+abs(Fq-mean(Fq))))
IAmml=sum(abs(weibullmml-Fq))/(sum(abs(weibullmml-mean(Fq))+abs(Fq-mean(Fq))))
IAee=sum(abs(weibullee-Fq))/(sum(abs(weibullee-mean(Fq))+abs(Fq-mean(Fq))))
IApwm=sum(abs(weibullpwm-Fq))/(sum(abs(weibullpwm-mean(Fq))+abs(Fq-mean(Fq))))
# was abs(F-mean(Fq)): F is FALSE, not Fq
IAwasp=sum(abs(weibullwasp-Fq))/(sum(abs(weibullwasp-mean(Fq))+abs(Fq-mean(Fq))))
IAwls=sum(abs(weibullwls-Fq))/(sum(abs(weibullwls-mean(Fq))+abs(Fq-mean(Fq))))
# RELATIVE ROOT MEAN SQUARE ERROR (RRMSE)
# Best: RRMSE < 0.1 *** Good: 0.1 < RRMSE < 0.2
# Fair: 0.2 < RRMSE < 0.3 *** Poor: RRMSE > 0.3
# (The "name / = value" split assignments were syntax errors; rejoined.)
RRMSEmm = sqrt(sum((weibullmm-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEmv = sqrt(sum((weibullmv-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEpd = sqrt(sum((weibullpd-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEcf = sqrt(sum((weibullcf-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEkmq = sqrt(sum((weibullkmq-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEeml = sqrt(sum((weibulleml-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEls = sqrt(sum((weibullls-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEmml = sqrt(sum((weibullmml-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEee = sqrt(sum((weibullee-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEpwm = sqrt(sum((weibullpwm-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEwasp = sqrt(sum((weibullwasp-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
RRMSEwls = sqrt(sum((weibullwls-Fq)^2)/ length(Fq))/(sum(Fq)/length(Fq))
# ROOT MEAN SQUARE RELATIVE ERROR (RMSRE)
# RMSE of the relative (per-bin) errors.
# (A stray duplicate of the coemm assignment from the efficiency section was
# removed here; it recomputed the identical value.)
RMSREmm= sqrt(sum(((Fq-weibullmm)/Fq)^2)/length(Fq))
RMSREmv= sqrt(sum(((Fq-weibullmv)/Fq)^2)/length(Fq))
RMSREpd= sqrt(sum(((Fq-weibullpd)/Fq)^2)/length(Fq))
RMSREcf= sqrt(sum(((Fq-weibullcf)/Fq)^2)/length(Fq))
RMSREkmq= sqrt(sum(((Fq-weibullkmq)/Fq)^2)/length(Fq))
RMSREeml= sqrt(sum(((Fq-weibulleml)/Fq)^2)/length(Fq))
RMSREls= sqrt(sum(((Fq-weibullls)/Fq)^2)/length(Fq))
RMSREmml= sqrt(sum(((Fq-weibullmml)/Fq)^2)/length(Fq))
RMSREee= sqrt(sum(((Fq-weibullee)/Fq)^2)/length(Fq))
RMSREpwm= sqrt(sum(((Fq-weibullpwm)/Fq)^2)/length(Fq))
RMSREwasp= sqrt(sum(((Fq-weibullwasp)/Fq)^2)/length(Fq))
RMSREwls= sqrt(sum(((Fq-weibullwls)/Fq)^2)/length(Fq))
# MAXIMUM ABSOLUTE RELATIVE ERROR
# (Split "name / = value" assignments rejoined.)
# NOTE(review): max() here wraps a single scalar (means are taken first), so
# each erMAX* is just the absolute relative error of the means; an
# element-wise version would be max(abs((Fq-weibull*)/Fq)) -- confirm intent.
erMAXmm = max(abs((mean(Fq)-mean(weibullmm))/mean(Fq)))
erMAXmv = max(abs((mean(Fq)-mean(weibullmv))/mean(Fq)))
erMAXpd = max(abs((mean(Fq)-mean(weibullpd))/mean(Fq)))
erMAXcf = max(abs((mean(Fq)-mean(weibullcf))/mean(Fq)))
erMAXkmq = max(abs((mean(Fq)-mean(weibullkmq))/mean(Fq)))
erMAXeml = max(abs((mean(Fq)-mean(weibulleml))/mean(Fq)))
erMAXls = max(abs((mean(Fq)-mean(weibullls))/mean(Fq)))
erMAXmml = max(abs((mean(Fq)-mean(weibullmml))/mean(Fq)))
erMAXee = max(abs((mean(Fq)-mean(weibullee))/mean(Fq)))
erMAXpwm = max(abs((mean(Fq)-mean(weibullpwm))/mean(Fq)))
erMAXwasp = max(abs((mean(Fq)-mean(weibullwasp))/mean(Fq)))
erMAXwls = max(abs((mean(Fq)-mean(weibullwls))/mean(Fq)))
# RELATIVE PERCENTAGE ERROR (per histogram bin, in %)
rpemm=((weibullmm-Fq)/Fq)*100
rpemv=((weibullmv-Fq)/Fq)*100
rpepd=((weibullpd-Fq)/Fq)*100
rpecf=((weibullcf-Fq)/Fq)*100
rpekmq=((weibullkmq-Fq)/Fq)*100
rpeeml=((weibulleml-Fq)/Fq)*100
rpels=((weibullls-Fq)/Fq)*100
rpemml=((weibullmml-Fq)/Fq)*100
rpeee=((weibullee-Fq)/Fq)*100
rpepwm=((weibullpwm-Fq)/Fq)*100
rpewasp=((weibullwasp-Fq)/Fq)*100
rpewls=((weibullwls-Fq)/Fq)*100
#data.frame(rpemm,rpepd,rpecf,rpekmq,rpeeml,rpeee,rpepwm,rpemml,rpeee,rpepwm,rpewasp,rpewls)
# RESULTS -- estimated shape (k*) and scale (c*) parameters per method.
rbind(kmm,kmv,kpd,kcf,cmm,cmv,cpd,ccf)
rbind(kmq,keml,kls,kmml,
k,cmq,ceml,cls,cmml,c)
rbind(kee,kpwm,kwasp,kwls,cee,cpwm,cwasp,cwls)
# Cross-checks with the fitdistrplus package.
mmedist(x3, "beta")$estimate #moment matching estimation
fitdist(v2, distr = "weibull", method = "mse") #maximum spacing estimation
fitdist(v2, distr = "weibull", method = "mle") #maximum likelihood estimation
qmedist(v2, "weibull", probs=c(1/3, 2/3))$estimate #quantile matching estimation
# Maximum goodness-of-fit estimation, Stephens (1986)
mgedist(v2, distr="weibull", gof = "CvM")$estimate #Cramer-von Mises distance (CvM)
mgedist(v2, distr="weibull", gof = "KS")$estimate #Kolmogorov-Smirnov distance (KS)
mgedist(v2, distr="weibull", gof = "AD")$estimate #Anderson-Darling distance (AD)
# Maximum goodness-of-fit estimation, Luceno (2006)
mgedist(v2, distr="weibull", gof = "ADL")$estimate #Anderson-Darling distance Left (ADL)
mgedist(v2, distr="weibull", gof = "AD2R")$estimate #Anderson-Darling distance 2 Right (AD2R)
mgedist(v2, distr="weibull", gof = "AD2L")$estimate #Anderson-Darling distance 2 Left (AD2L)
mgedist(v2, distr="weibull", gof = "ADR")$estimate #Anderson-Darling distance Right (ADR)
# Goodness-of-fit summary tables, one row per method.
rbind(coefdetmm,coefdetmv,coefdetpd,coefdetcf,coefdetkmq,coefdeteml,coefdetls,coefdetmml,coefdetee,coefdetpwm,coefdetwasp,coefdetwls)
rbind(R2mm,R2mv,R2pd,R2cf,R2kmq,R2eml,R2ls,R2mml,R2ee,R2pwm,R2wasp,R2wls)
rbind(rmsemm,rmsemv,rmsepd,rmsecf,rmsekmq,rmseeml,rmsels,rmsemml,rmseee,rmsepwm,rmsewasp,rmsewls)
rbind(mbemm,mbemv,mbepd,mbecf,mbekmq,mbeeml,mbels,mbemml,mbeee,mbepwm,mbewasp,mbewls)
rbind(maemm,maemv,maepd,maecf,maekmq,maeeml,maels,maemml,maeee,maepwm,maewasp,maewls)
rbind(coemm,coemv,coepd,coecf,coekmq,coeeml,coels,coemml,coeee,coepwm,coewasp,coewls)
rbind(mapemm,mapemv,mapepd,mapecf,mapekmq,mapeeml,mapels,mapemml,mapeee,mapepwm,mapewasp,mapewls)
rbind(IAmm,IAmv,IApd,IAcf,IAkmq,IAeml,IAls,IAmml,IAee,IApwm,IAwasp,IAwls)
rbind(RRMSEmm,RRMSEmv,RRMSEpd,RRMSEcf,RRMSEkmq,RRMSEeml,RRMSEls,RRMSEmml,RRMSEee,RRMSEpwm,RRMSEwasp,RRMSEwls)
rbind(RMSREmm,RMSREmv,RMSREpd,RMSREcf,RMSREkmq,RMSREeml,RMSREls,RMSREmml,RMSREee,RMSREpwm,RMSREwasp,RMSREwls)
rbind(erMAXmm,erMAXmv,erMAXpd,erMAXcf,erMAXkmq,erMAXeml,erMAXls,erMAXmml,erMAXee,erMAXpwm,erMAXwasp,erMAXwls)
rbind(NRMDmm,NRMDmv,NRMDpd,NRMDcf,NRMDkmq,NRMDeml,NRMDee,NRMDpwm,NRMDwasp,NRMDwls)
# GRAPH GROUP 1 ----------------------------------------------------------------
#layout(c(1,2))
#par(mfrow=c(1,2))
# hist() calls below are used only for their binned-density return values; the
# handles (a..m) feed the curve overlays later.  dev.off() closes each device.
# NOTE(review): weibullmmn and weibullj are not defined in view (weibullnm /
# weibullmm are) -- confirm the intended objects.
a=hist(weibullmmn); dev.off()
b=hist(weibullmv); dev.off()
c=hist(weibullj); dev.off()
d=hist(weibullwasp); dev.off()
a=hist(weibullnm); dev.off()
b=hist(weibullmv); dev.off()
m=hist(weibullmm); dev.off()
k=hist(weibullwasp); dev.off()
c=hist(weibullpd); dev.off()
d=hist(weibullcf); dev.off()
e=hist(weibullkmq); dev.off()
f=hist(weibulleml); dev.off()
g=hist(weibullls); dev.off()
h=hist(weibullmml); dev.off()
i=hist(weibullee); dev.off()
j=hist(weibullpwm); dev.off()
l=hist(weibullwls); dev.off()
m=hist(weibullmm); dev.off()
# Alternative palettes:
#col=terrain.colors(5))
#col=topo.colors(500)
#col=cm.colors(5))
#col=rainbow(5))
#col=heat.colors(5)
a=hist(weibullmmn); dev.off()
b=hist(weibullmv); dev.off()
c=hist(weibullj); dev.off()
d=hist(weibullwasp); dev.off()
y=hist(v2, prob=T)
# Empty frame, then overlay the histogram and the fitted curves.
# (The ylab string keeps its original embedded line break.)
plot(-1,-1,xlim=c(0,22),
ylim=c(0,7),xlab="Wind speed(m/s)",ylab="Probability density
function (%)")
grid(col="black", lty="solid", lwd=1.5)
#abline(h=0, lty=2); abline(v=0, lty=2)
#require(graphics)
#grid(5,6,col = "lightgray", lty = "dotted",lwd = par("lwd"), equilogs =F)
par(new=T);hist(v2,
col= "yellow" ,main=NA,
xlab=NA,ylab=NA,axes=FALSE,border="red")
# http://www.stat.columbia.edu/~tzheng/files/Rcolor.pdf
# heat.colors(4, alpha=1) topo.colors(200)
# hsv(h = 1, s = 0.8, v = 0.9)
# Example normal-curve overlay (commented; xfit/yfit only used here):
# xfit<-seq(min(v2),max(v2),length=40)
# yfit<-dnorm(xfit,mean=mean(v2),sd=sd(v2))
# lines(xfit, yfit, col="blue", lwd=2)
par(new=T);plot(y$mids,weibullmmn,type="b",
pch=15,lty=1, lwd=2,cex=1,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
par(new=T);plot(y$mids,weibullmv,type="b",
pch=16,lty=1,
lwd=2,cex=1,col="mediumblue",axes=FALSE,add=T,ylab=NA,xlab=NA)
par(new=T);plot(y$mids,weibullj,type="b",
pch=17,lty=1,
lwd=2,cex=1,col="deeppink",axes=FALSE,add=T,ylab=NA,xlab=NA)
par(new=T);plot(y$mids,weibullwasp,type="b",
pch=20,lty=1,
lwd=2,cex=1,col="cyan",axes=FALSE,add=T,ylab=NA,xlab=NA)
data.frame(weibullmmn,weibullmv,weibullj,weibullwasp,y$mids,y$density)
# Stand-alone legend panel (labels keep their original embedded line breaks).
plot(-1,-1,xlim=c(0,20),
ylim=c(0,20),xlab="",ylab="")
legend("topright",c("Moment
Method" , "Maximum
Likelihood",
"Empirical Method of
Jestus", "WAsP Method"),
lty=c(1,1,1,1),
col=c("red",
"mediumblue", "deeppink", "cyan"),
lwd=c(2,2,2,2), bty="n",
cex=c(1,1,1,1),
pch=c(15,16,17,20) )
# CUMULATIVE DISTRIBUTION FUNCTION
#FCmm=1-exp(-((v2/cmm)^kmm)) # Long Way
FCmmn=1-exp(-((y$mids/c)^k)) # Short Way: CDF evaluated at the bin midpoints
FCmv=1-exp(-((y$mids/cmv)^kmv))
FCj=1-exp(-((y$mids/cmm)^kmm))
FCwasp=1-exp(-((y$mids/cwasp)^kwasp))
data.frame(FCmmn,FCmv,FCj,FCwasp,y$mids,y$density)
# Plot the fitted CDFs at the bin midpoints.
# NOTE(review): the original plotted FCmm (length of v2) against y$mids,
# which errors on length mismatch; FCmmn (computed at y$mids) is used instead.
plot(y$mids,FCmmn,type="b",
pch=15,lty=1, lwd=2,cex=1,col="red")
par(new=T)
# Empty frame for the combined CDF figure (ylab keeps its line break).
plot(-1,-1,xlim=c(0,22),
ylim=c(0,1),xlab="Wind speed(m/s)",ylab="Cumulative distribution
function (%)")
grid(col="black", lty="solid", lwd=1.5)
par(new=T);plot(y$mids,FCmmn,type="b",
pch=15,lty=1, lwd=2,cex=1,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
par(new=T);plot(y$mids,FCmv,type="b",
pch=16,lty=1,
lwd=2,cex=1,col="mediumblue",axes=FALSE,add=T,ylab=NA,xlab=NA)
par(new=T);plot(y$mids,FCj,type="b",
pch=17,lty=1,
lwd=2,cex=1,col="deeppink",axes=FALSE,add=T,ylab=NA,xlab=NA)
par(new=T);plot(y$mids,FCwasp,type="b",
pch=20,lty=1,
lwd=2,cex=1,col="darkgoldenrod1",axes=FALSE,add=T,ylab=NA,xlab=NA)
# Density barplot with one fitted curve ("xlan" typo fixed to "xlab").
barplot(y$density~y$mids,
col="blue3", xlab="", ylab="")
par(new=T);
plot(weibullm~y$mids, type="b", pch=19, col="red",
axes=FALSE, xlab=NA, ylab=NA)
par(new=T)
plot(y$mids,weibullmm,type="b",
pch=20,lty=1, lwd=2,cex=1,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
#pch=20
# Invisible scatters (cex=0) plus dweibull() curve per estimation method.
# Fixes: colour strings contained stray spaces/newlines from line-wrapping
# (" gold4", "\nhotpink1 ", ...), which are invalid colour names and make the
# calls error; cleaned to the plain colour names.
# NOTE(review): `add=T` is not a base plot() argument (ignored with a
# warning); the `x=b$density` assignments do not feed curve() -- curve()
# evaluates its first argument as an expression in its own x variable.
plot(v2,weibullmm,pch=3,cex=0,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
par(new=T)
curve(dweibull(x,kmm, cmm), lty=1,lwd=2,col="gold4", add=T)
par(new=T)
plot(v2,weibullmv,pch=4,cex=0,col="darkmagenta",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=b$density
curve(dweibull(x, kmv, cmv), lty=1,lwd=2,col="hotpink1", add=T)
par(new=T)
plot(v2,weibullpd,pch=0,cex=0,col="yellow",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=c$density
curve(dweibull(x, kpd, cpd), lty=1,lwd=2,col="yellow", add=T)
par(new=T)
plot(v2,weibullcf,pch=1,cex=0,col="magenta4",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=d$density
curve(dweibull(x, kcf, ccf), lty=1,lwd=2,col="lightsalmon", add=T)
par(new=TRUE)
plot(v2,weibullkmq,pch=3,cex=0,col="orange",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=e$density
curve(dweibull(x, kmq, cmq), lty=1,lwd=2,col="red2", add=T)
par(new=T)
plot(v2,weibulleml,pch=4,cex=0,col="brown",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=f$density
curve(dweibull(x, keml, ceml), lty=1,lwd=2,col="paleturquoise1", add=T)
par(new=T)
plot(v2,weibullls,pch=0,cex=0,col="yellow",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=g$density
curve(dweibull(x, kls, cls), lty=1,lwd=2,col="mediumblue", add=T)
par(new=T)
plot(v2,weibullmml,pch=3,cex=0,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=h$density
curve(dweibull(x, kmml, cmml), lty=1,lwd=2,col="turquoise1", add=T)
par(new=T)
plot(v2,weibullee,pch=3,cex=0,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=i$density
curve(dweibull(x, kee, cee), lty=1,lwd=2,col="green1", add=T)
par(new=T)
plot(v2,weibullpwm,pch=4,cex=0,col="green",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=j$density
curve(dweibull(x, kpwm, cpwm), lty=1,lwd=2, col="snow3", add=T)
par(new=T)
plot(v2,weibullwasp,pch=0,cex=0,col="yellow",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=k$density
curve(dweibull(x, kwasp, cwasp), lty=1,lwd=2, col="mediumorchid1", add=T)
par(new=T)
plot(v2,weibullwls,pch=1,cex=0,col="purple",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=l$density
curve(dweibull(x, kwls, cwls), lty=1,lwd=2, col="chocolate1", add=T)
# Stand-alone legend panel for the 10 fitted-curve colours.
plot(-1,-1,xlim=c(0,20),
ylim=c(0,20),xlab="",ylab="")
# NOTE(review): 10 labels but 12 colours are supplied; the last two colours
# are unused.  Label strings keep their original embedded line breaks.
legend("topright",c(
"Empirical Method of Jestus","Maximum Likelihood",
"Quartiles
Method","Empirical Method of Lysen",
"Graphical
Method","Modified Maximum Likelihood",
"Equivalent Energy
Method","Probability Weighted Moments Method",
"WAsP
Method","Weighted Least Square Method"),
lty=c(1,1,1,1,1,1,1,1,1,1),
col=c("gold4",
"hotpink1", "yellow",
"lightsalmon","red2","paleturquoise1",
"mediumblue",
"turquoise1",  # had a stray newline inside the name (invalid colour); fixed
"green1","snow3","mediumorchid1",
"chocolate1"),
lwd=c(5,5,5,5,5,5,5,5,5,5),
bty="n", cex=1.8)
# Summary-statistics box (labels keep their original embedded line breaks).
plot(-1,-1,xlim=c(0,20),
ylim=c(0,20),xlab="",ylab="")
legend("top",c("Vm
=9.08 m/s","sd = 3.71","skew = -0.08","kurt =
-0.65", "min =0.4m/s", "max = 20.8m/s" ),
cex=1.7,bg="white",box.col="black")
#horiz=T
#png(filename="tes.png",width=5000,height=3205,units="px")
#legend("bottomleft", inset=.02, title="Number of Cylinders",
#       c("4","6","8"), fill=topo.colors(3), horiz=TRUE, cex=0.8)
# NEW WEIBULL DISTRIBUTION (commented gradient-line ggplot example)
#https://stackoverrun.com/fr/q/10266756
#data<-read.csv(file("clipboard"),header=T,sep="\t", dec=",")
# NOTE(review): attach(data) had lost its leading "#" when the source was
# wrapped; re-commented to match the surrounding commented example.
#names(data); attach(data)
#set.seed(1938)
#X=seq(0,100,length.out=1000)
#Y=cumsum(rnorm(1000))
#a2 <- data.frame(year = X, values = Y)
#require(ggplot2)
#ggplot(a2, aes(x = year, y = values, color = values )) +
#  geom_line(size = 0.5) +
#  geom_smooth(aes(color=..y..), size=1.5, se=FALSE) +
#  scale_colour_gradient2(low = "blue", mid = "yellow" , high = "red",
#                         midpoint=median(a2$values)) +
#  theme_bw()
x=hist (V_60); x
# APPUREMENT DE LA BASE (data-cleaning checks)
# How often is the 40 m speed above the 30 m speed?
table(V_40 > V_30)
# Counts of the FALSE/TRUE outcomes.  Renamed from F/T: assigning to F and T
# shadows the FALSE/TRUE shorthands that the rest of the script relies on
# (e.g. paddle = F, prob=T).
n_false=18135
n_true=62211
Tt=n_false+n_true; FT=n_true/Tt*100;
FF=n_false/Tt*100;
cbind(FT,FF)
nd=V_40 > V_30   # rejoined; the comparison had been split across two lines
data=data.frame(nd, V_40, V_30)
write.table(data, "data.txt", sep="\t")
#dput(goubet, file = "", control = c("keepNA", "keepInteger",
#     "niceNames", "showAttributes"))
#dget(file, keep.source = FALSE)
#WindRose.R
# Read tab-separated clipboard data with comma decimals; first column = row names.
goubet<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
#data(wind_data)
# NOTE(review): windrose() is provided by the 'clifro' package; confirm that
# a package named 'windrose' is actually what is installed here.
require(windrose)
wind_rose <- windrose(goubet, spd = w60, dir = d60)  # rejoined split assignment
plot(wind_rose)
# Program initialisation -------------------------------------------------------
# Wind-rose plot built with ggplot2 + RColorBrewer (adapted from the classic
# Stack Overflow answer).  `spd`/`dir` may be numeric vectors, or column names
# into `data`.  spdres/dirres are the speed/direction bin widths; spdmin,
# spdmax and spdseq control the speed bins; palette names an RColorBrewer
# palette; countmax fixes the radial axis maximum; debug > 0 prints
# diagnostics.  Returns the ggplot object (also printed).
# Syntax repairs vs. the original paste: the assignment was split across two
# lines, several wrapped comments had lost their "#", and the ggplot chain had
# a dangling leading "+" that detached the layers from the plot object.
plot.windrose <- function(data,
                          spd,
                          dir,
                          spdres = 2,
                          dirres = 30,
                          spdmin = 2,
                          spdmax = 20,
                          spdseq = NULL,
                          palette = "YlGnBu",
                          countmax = NA,
                          debug = 0){
  # Look at what was passed in: raw vectors or a data frame + column names.
  if (is.numeric(spd) && is.numeric(dir)){
    # Vectors of speed and direction: wrap them into a data frame.
    data <- data.frame(spd = spd,
                       dir = dir)
    spd = "spd"
    dir = "dir"
  } else if (exists("data")){
    # A data frame plus the names of the speed and direction columns --
    # already the format we want for later use.
  }
  # Tidy up input data: blank both values wherever either is missing.
  n.in <- NROW(data)
  dnu <- (is.na(data[[spd]]) | is.na(data[[dir]]))
  data[[spd]][dnu] <- NA
  data[[dir]][dnu] <- NA
  # Figure out the wind speed bins.
  if (missing(spdseq)){
    spdseq <- seq(spdmin,spdmax,spdres)
  } else {
    if (debug >0){
      cat("Using custom speed bins
\n")
    }
  }
  # Get some information about the number of bins, etc.
  n.spd.seq <- length(spdseq)
  n.colors.in.range <- n.spd.seq - 1
  # Create the color map.
  spd.colors <-
    colorRampPalette(brewer.pal(min(max(3,
                                        n.colors.in.range),
                                    min(9,
                                        n.colors.in.range)),
                                palette))(n.colors.in.range)
  if (max(data[[spd]],na.rm = TRUE) > spdmax){
    # Extra grey bin for speeds above spdmax.
    spd.breaks <- c(spdseq,
                    max(data[[spd]],na.rm = TRUE))
    spd.labels <- c(paste(c(spdseq[1:n.spd.seq-1]),
                          '-',
                          c(spdseq[2:n.spd.seq])),
                    paste(spdmax,
                          "-",
                          max(data[[spd]],na.rm = TRUE)))
    spd.colors <- c(spd.colors, "grey50")
  } else{
    spd.breaks <- spdseq
    spd.labels <- paste(c(spdseq[1:n.spd.seq-1]),
                        '-',
                        c(spdseq[2:n.spd.seq]))
  }
  data$spd.binned <- cut(x = data[[spd]],
                         breaks = spd.breaks,
                         labels = spd.labels,
                         ordered_result = TRUE)
  # Clean up the data.
  # NOTE(review): the result is assigned to `data.` (trailing dot) and never
  # used, so `data` still contains the NA rows.  Kept as in the original --
  # confirm whether `data <- na.omit(data)` was intended.
  data. <- na.omit(data)
  # Figure out the wind direction bins (centred on north).
  dir.breaks <- c(-dirres/2,
                  seq(dirres/2, 360-dirres/2,
                      by = dirres),
                  360+dirres/2)
  dir.labels <- c(paste(360-dirres/2,"-",dirres/2),
                  paste(seq(dirres/2, 360-3*dirres/2, by = dirres),
                        "-",
                        seq(3*dirres/2,
                            360-dirres/2, by = dirres)),
                  paste(360-dirres/2,"-",dirres/2))
  # Assign each wind direction to a bin.
  dir.binned <- cut(data[[dir]],
                    breaks = dir.breaks,
                    ordered_result = TRUE)
  levels(dir.binned) <- dir.labels
  data$dir.binned <- dir.binned
  # Run debug output if required.
  if (debug>0){
    cat(dir.breaks,"\n")
    cat(dir.labels,"\n")
    cat(levels(dir.binned),"\n")
  }
  # Deal with the factor-ordering change introduced around ggplot2 2.2.
  if(packageVersion("ggplot2") > "2.2"){
    cat("Hadley broke my code\n")
    data$spd.binned = with(data,
                           factor(spd.binned, levels = rev(levels(spd.binned))))
    spd.colors = rev(spd.colors)
  }
  # Create the plot.  (Legend title keeps its original embedded line break.)
  p.windrose <- ggplot(data = data,
                       aes(x = dir.binned,
                           fill = spd.binned)) +
    geom_bar() +
    scale_x_discrete(drop = FALSE,
                     labels = waiver()) +
    coord_polar(start = -((dirres/2)/360) * 2*pi) +
    scale_fill_manual(name = "Wind Speed
(m/s)",
                      values = spd.colors,
                      drop = FALSE) +
    theme(axis.title.x = element_blank())
  # Adjust axes if required.
  if (!is.na(countmax)){
    p.windrose <- p.windrose +
      ylim(c(0,countmax))
  }
  # Print the plot and return the handle to the wind rose.
  print(p.windrose)
  return(p.windrose)
}
# End of program ---------------------------------------------------------------
goubet<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
attach(goubet)
require(ggplot2)
require(RColorBrewer)
# Wind rose with default speed bins, then with custom bins.
p2 <- plot.windrose(spd = V_60,
                    dir = DIR_60)
p2 + theme_bw()
p2 <- plot.windrose(spd = V_60,
                    dir = DIR_60,
                    spdseq = c(0,3,6,9,12,15,18,21))
p2 + theme_bw()
# R 4.0.2 version --------------------------------------------------------------
goubet<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
# Reorganise the data beforehand on N S E W
require(climatol)
data(windfr)
rosavent(windfr, 4, 4, ang=-3*pi/16, main="Annual windrose")
attach(goubet)
names(goubet)
#https://randroll.wordpress.com/2018/08/30/a-wind-rose-with-openair-package/
library(openair)
windRose(goubet,
ws = "V", wd = "D", statistic = "prop.count",
angle =25)
windRose(goubet,
ws = "V_60", wd = "DIR_60")
windRose(goubet,
ws = "V_60", wd ="DIR_60",paddle = F, border = T)
windRose(goubet,
ws = "V_60", wd ="DIR_60",
paddle = F, border = T,
breaks = c(0, 4, 8, 12, 16),
key.position = 'right')
library(viridis)
windRose(goubet,
ws = "W", wd ="D",
paddle = F, border = F,
breaks = c(0, 4, 8, 12, 16),
key.position = 'bottom',
col =rev(viridis(5)), type='X' ) # classes
goubet<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",")
# key.position options: "right", "left", "top", "bottom"
# NOTE: the original passed angle.scale twice (315 and 45), which is an error
# ("matched by multiple actual arguments"); the duplicate 45 was removed.
windRose(goubet,
ws = "W", wd ="D",
paddle = F, border = T,
breaks = c(0, 4, 8, 12, 16),
key.position = "bottom",
col =c("red",
"orange", "yellow", "green",
"darkgreen"),type = 'X',
grid.line = list(value = 10, lty = 1,
col = "blue"), annotate = TRUE, angle.scale =315,
key.header = "Wind Speed")
windRose(goubet,
ws = "V_60", wd ="DIR_60",
paddle = F, border = T,
breaks = c(0, 0.5, 2.4, 3.6, 5.7, 8.8,
11.1),
key.position = 'right',
type = 'daylight')
windRose(goubet,type = "year")
windRose(gru,
ws = 'ws', wd = 'wd',
paddle = F, border = T,
breaks = c(0, 0.5, 2.4, 3.6, 5.7, 8.8,
11.1),
type = 'month',
grid.line = 10, # show grid line each 10%
annotate = F) # Don't display mean and calm
#https://randroll.wordpress.com/2018/08/30/a-wind-rose-with-openair-package/
polarFreq(mydata)
# wind frequencies by year
if (FALSE) polarFreq(mydata, type = "year")
# mean SO2 by year, showing only bins with at least 2 points
if (FALSE) polarFreq(mydata, pollutant = "so2", type = "year",
                     statistic = "mean", min.bin = 2)
# weighted mean SO2 by year, showing only bins with at least 2 points
if (FALSE) polarFreq(mydata, pollutant = "so2", type = "year",
                     statistic = "weighted.mean",
                     min.bin = 2)
# windRose for just 2000 and 2003 with different colours
if (FALSE) polarFreq(subset(mydata, format(date, "%Y") %in% c(2000,
                                                              2003)),
                     type = "year",
                     cols = "jet")
# user defined breaks from 0-700 in intervals of 100 (note linear scale)
if (FALSE) polarFreq(mydata, breaks = seq(0, 700, 100))
# more complicated user-defined breaks - useful for highlighting bins
# with a certain number of data points
if (FALSE) polarFreq(mydata, breaks = c(0, 10, 50, 100, 250, 500, 700))
# source contribution plot and use of offset option
if (FALSE) polarFreq(mydata, pollutant = "pm25", statistic
                     ="weighted.mean", offset = 50, ws.int = 25, trans = FALSE)
#https://davidcarslaw.github.io/openair/reference/polarFreq.html
polarPlot(openair::mydata, pollutant = "nox")
if (FALSE) {
  # polarPlots by year on same scale
  polarPlot(mydata, pollutant =
              "so2", type = "year", main = "polarPlot of so2")
  # set minimum number of bins to be used to see if pattern remains similar
  polarPlot(mydata, pollutant =
              "nox", min.bin = 3)
  # plot by day of the week
  polarPlot(mydata, pollutant =
              "pm10", type = "weekday")
  # show the 95% confidence intervals in the surface fitting
  polarPlot(mydata, pollutant =
              "so2", uncertainty = TRUE)
  # Pair-wise statistics
  # Pearson correlation
  polarPlot(mydata, pollutant =
              c("pm25", "pm10"), statistic = "r")
  # Robust regression slope, takes a bit of time
  polarPlot(mydata, pollutant =
              c("pm25", "pm10"), statistic = "robust.slope")
  # Least squares regression works too but it is not recommended, use robust
  # regression
  # polarPlot(mydata, pollutant = c("pm25", "pm10"), statistic = "slope")
}
#https://davidcarslaw.github.io/openair/reference/polarPlot.html
# windRose in 10 degree intervals with gridlines and width adjusted
if (FALSE) {
  windRose(goubet, angle = 10, width = 0.2,
           grid.line = 1)
}
# pollutionRose of nox
pollutionRose(goubet, pollutant = "nox")
if (FALSE) {
  pollutionRose(mydata, pollutant =
                  "pm10", type = "year", statistic = "prop.mean")
}
## example of comparing 2 met sites
## first we will make some new ws/wd data with a positive bias
## (the split "name / = value" assignments below were syntax errors; rejoined)
mydata$ws2 = mydata$ws + 2 * rnorm(nrow(mydata)) + 1
mydata$wd2 = mydata$wd + 30 * rnorm(nrow(mydata)) + 30
## need to correct negative wd
id <- which(mydata$wd2 < 0)
mydata$wd2[id] <- mydata$wd2[id] + 360
## results show positive bias in wd and ws
pollutionRose(mydata,
              ws = "ws", wd = "wd", ws2 = "ws2", wd2 =
                "wd2")
#https://davidcarslaw.github.io/openair/reference/windRose.html
# first create a true POSIXct timestamp from the date and hour columns
# (split "name / <- value" assignments below were syntax errors; rejoined)
data.in$timestamp <- as.POSIXct(paste0(data.in$date, " ", data.in$hr),
                                tz = "GMT",
                                format = "%m/%d/%Y %H:%M")
# Convert the time stamp to years and months
data.in$Year <- as.numeric(format(data.in$timestamp, "%Y"))
data.in$month <- factor(format(data.in$timestamp, "%B"),
                        levels = month.name)
# recreate p.wr2, so that it includes the new data
p.wr2 <- plot.windrose(data = data.in,
                       spd = "ws.80",
                       dir = "wd.80")
# now generate the faceting
p.wr3 <- p.wr2 + facet_wrap(~month,
                            ncol = 3)
# and remove labels for clarity
p.wr3 <- p.wr3 + theme(axis.text.x = element_blank(),
                       axis.title.x = element_blank())
#https://stackoverflow.com/questions/17266780/wind-rose-with-ggplot-r
# POSSIBILITY OF GRAPH WIND ROSE
library(clifro)
# Synthetic two-station data set: 80 + 20 Weibull speeds, directions in
# degrees wrapped to [0, 360).
wind_df = data.frame(wind_speeds = c(rweibull(80, 2, 4), rweibull(20, 3, 9)),
                     wind_dirs = c(rnorm(80,
                                         135, 55), rnorm(20, 315, 35)) %% 360,
                     station = rep(rep(c("Station A", "Station B"), 2),
                                   rep(c(40,
                                         10), each = 2)))
with(wind_df, windrose(wind_speeds, wind_dirs))
with(wind_df, windrose(wind_speeds, wind_dirs,
                       speed_cuts = c(3, 6, 9,
                                      12),
                       legend_title =
                         "Wind Speed\n(m/s)",
                       legend.title.align = .5,
                       ggtheme =
                         "bw",
                       col_pal =
                         "Greys"))
with(wind_df, windrose(wind_speeds, wind_dirs, "Artificial Auckland Wind"))
with(wind_df, windrose(wind_speeds, wind_dirs, station, n_col = 2))
library(ggplot2)
ggsave("my_windrose.png")
## End(Not run)
# HISTOGRAM 2 ------------------------------------------------------------------
#https://bio304-class.github.io/bio304-fall2017/ggplot-bivariate.html
# Syntax repairs: split "name / <- value" assignments and bare "p / + layer"
# chains (which evaluate p and then apply unary + to a ggplot layer -- an
# error) have been rejoined.
require(tidyverse)
setosa <- filter(iris, Species == "setosa")
dim(setosa)
# "virgnica" was misspelled, so the filter silently matched only setosa; fixed.
setosa.or.virginica <- filter(iris, Species == "setosa" | Species ==
                                "virginica")
dim(setosa.or.virginica)
big.setosa <- filter(iris, Species == "setosa" & Sepal.Width > 3.5)
dim(big.setosa)
ggplot(setosa) +
  geom_point(aes(x = Sepal.Length, y = Sepal.Width))
p <- ggplot(setosa, mapping = aes(x = Sepal.Length, y = Sepal.Width))
p + geom_point()
p + geom_point() + theme_bw()
p + geom_point() + theme_classic()
p + geom_point() + theme_classic() + theme(aspect.ratio = 1)
my.theme <- theme_classic() +
  theme(aspect.ratio = 1)
# (Title/caption strings keep their original embedded line breaks.)
sepal.labels <- labs(x = "Sepal Length (cm)", y = "Sepal Width (cm)",
                     title = "Relationship
between Sepal Length and Width",
                     caption = "data from
Anderson (1935)")
p +
  geom_point() +
  sepal.labels + labs(subtitle = "I.
setosa data only") +my.theme
p +
  geom_jitter() + # using geom_jitter to avoid overplotting of points
  geom_smooth() +
  sepal.labels + labs(subtitle = "I.
setosa data only") +
  my.theme
p +
  geom_jitter() + # using geom_jitter to avoid overplotting of points
  geom_smooth(method = "lm", color =
                "red") + # using linear model ("lm")
  sepal.labels + labs(subtitle = "I.
setosa data only") +
  my.theme
#Bivariate density plots
p <- ggplot(setosa, mapping = aes(x = Sepal.Length, y = Sepal.Width))
p +
  stat_density_2d(aes(fill = ..level..), geom =
                    "polygon") +
  sepal.labels + labs(subtitle = "I.
setosa data only") +
  my.theme
p +
  stat_density_2d(aes(fill = ..level..), geom =
                    "polygon") +
  # lavenderblush is the HTML standard name for a light purplish-pink color
  scale_fill_continuous(low="lavenderblush",
                        high="red") +
  sepal.labels + labs(subtitle = "I.
setosa data only") +
  my.theme
p +
  geom_density2d() +
  sepal.labels + labs(subtitle = "I.
setosa data only") +
  my.theme
p +
  geom_density_2d() +
  geom_jitter(alpha=0.35) +
  sepal.labels + labs(subtitle = "I.
setosa data only") +
  my.theme
all.length.vs.width <- ggplot(iris, aes(x = Sepal.Length, y = Sepal.Width))
all.length.vs.width +
  geom_point(aes(color = Species, shape =
                   Species), size = 2, alpha = 0.6) +
  sepal.labels + labs(subtitle = "All
species") +
  my.theme
# HISTOGRAM (2D heat-map examples) ----------------------------------------------
# Color housekeeping
# NOTE(review): this section reuses the single-letter names r, x, y, k, h2
# that earlier sections also used; anything still needed from above should be
# consumed before this point.
library(RColorBrewer)
rf <- colorRampPalette(rev(brewer.pal(11,'Spectral')))
r <- rf(32)
# Create normally distributed data for plotting
x <- rnorm(mean=1.5, 5000)
y <- rnorm(mean=1.6, 5000)
df <- data.frame(x,y)
# Plot
plot(df,
     pch=16, col='black', cex=0.5)
library(gplots)
# Default call
h2 <- hist2d(df)
# OPTION 1: hexbin from package 'hexbin'
library(hexbin)
# Create hexbin object and plot
h <- hexbin(df)
plot(h)
plot(h,
     colramp=rf)
# hexbinplot function allows greater flexibility
hexbinplot(y~x,
           data=df, colramp=rf)
# Setting max and mins
hexbinplot(y~x,
           data=df, colramp=rf, mincnt=2, maxcnt=60)
# Scaling of legend - must provide both trans and inv functions
hexbinplot(y~x,
           data=df, colramp=rf, trans=log, inv=exp)
# Coarser binsizing and add colouring
h2 <- hist2d(df, nbins=25, col=r)
# Scaling with log as before
h2 <- hist2d(df, nbins=25, col=r, FUN=function(x) log(length(x)))
# OPTION 3: stat_bin2d from package 'ggplot'
library(ggplot2)
# Default call (as object)
p <- ggplot(df, aes(x,y))
h3 <- p + stat_bin2d()
h3
# Default call (using qplot)
qplot(x,y,data=df,
      geom='bin2d')
# Add colouring and change bins
h3 <- p + stat_bin2d(bins=25) + scale_fill_gradientn(colours=r)
h3
# Log scaling
h3 <- p + stat_bin2d(bins=25) + scale_fill_gradientn(colours=r,
                                                     trans="log")
h3
# OPTION 4: kde2d from package 'MASS'
# Not a true heatmap as interpolated (kernel density estimation)
library(MASS)
# Default call
k <- kde2d(df$x, df$y)
image(k,
      col=r)
# Adjust binning (interpolate - can be computationally intensive for large
# datasets)
k <- kde2d(df$x,
           df$y, n=200)
image(k,
      col=r)
# Addendum: 2D Histogram + 1D on sides (from Computational ActSci w R) #######
#http://books.google.ca/books?id=YWcLBAAAQBAJ&pg=PA60&lpg=PA60&dq=kde2d+log&source=bl&ots=7AB-RAoMqY&sig=gFaHSoQCoGMXrR9BTaLOdCs198U&hl=en&sa=X&ei=8mQDVPqtMsi4ggSRnILQDw&redir_esc=y#v=onepage&q=kde2d%20log&f=false
h1 <- hist(df$x, breaks=25, plot=F)
h2 <- hist(df$y, breaks=25, plot=F)
top <- max(h1$counts, h2$counts)
k <- kde2d(df$x,
           df$y, n=25)
# margins
oldpar <- par()
par(mar=c(3,3,1,1))
layout(matrix(c(2,0,1,3),2,2,byrow=T),c(3,1),
       c(1,3))
image(k,
      col=r) #plot the image
par(mar=c(0,2,1,0))
barplot(h1$counts,
        axes=F, ylim=c(0, top), space=0, col='red')
par(mar=c(2,0,0.5,1))
barplot(h2$counts,
        axes=F, xlim=c(0, top), space=0, col='red', horiz=T)
#https://www.r-bloggers.com/5-ways-to-do-2d-histograms-in-r/
library(squash)
set.seed(123)
x <- rnorm(10000)
y <- rnorm(10000) + x
hist2(x,
      y)
## pseudo-log-scale color breaks:
hist2(x,
      y, breaks = prettyLog, key.args = list(stretch = 4))
## log-scale color breaks; the old way using 'base'
## (notice box removal to make space for the vertical color key)
hist2(x,
      y, base = 2, key = vkey, nz = 5, bty = 'l')
install.packages("devtools")
library(devtools)
install_github("easyGgplot2",
"kassambara")
library(easyGgplot2)
#
Multiple histograms on the same plot
#
Color the histogram plot by the groupName "sex"
ggplot2.histogram(data=weight,
xName='weight',
groupName='sex',
legendPosition="top")
#
Histogram plots with semi-transparent fill.
#
alpha is the transparency of the overlaid color
ggplot2.histogram(data=weight,
xName='weight',
groupName='sex',
legendPosition="top",
alpha=0.5 )
#
Histogram plots with mean lines
ggplot2.histogram(data=weight,
xName='weight',
groupName='sex',
legendPosition="top",
alpha=0.5, addDensity=TRUE,
addMeanLine=TRUE,
meanLineColor="white", meanLineSize=1.5)
library(MASS)
set.seed(101)
my_data
<- rnorm(250, mean=1, sd=0.45) #
unkonwn distribution parameters
fit
<- fitdistr(V_60, densfun="weibull") # we assume my_data ~ Normal(?,?)
fit
hist(V_60,
pch=20, breaks="sturges", prob=T, main="",
col="green")
x11()
a=hist(weibullmm)
x=a$density
curve(dweibull(x,
2.6, 10.21), lty=1,col="blue",
add=T,pch=2)
b=hist(V_60)
plot(a$density,b$density,type='l')
# plsmo smoothing methods: "lowess", "supsmu", "raw", "intervals"
plsmo(weibullmm,
y=V_60, "lowess",type="b")
# Negative log-likelihood of a Normal(mu, sigma) model for the global
# vector `my_data`; params = c(mu, sigma). Minimised with optim() below.
log_likelihood <- function(params) {
  mu <- params[1]
  sigma <- params[2]
  -sum(dnorm(my_data, mu, sigma, log = TRUE))
}
fit2
<- optim(c(0,1), log_likelihood) #
c(0,1) are just initial guesses
fit2
hist(my_data,
pch=20, breaks=25, prob=TRUE)
curve(dnorm(x,
fit2$par[1], fit2$par[2]), col="blue", lwd=6, add=T) #
optim fit
curve(dnorm(x,
fit$estimate[1], fit$estimate[2]), col="red", lwd=2, add=T) # fitdistr fit
hist(V_60)
a=dweibull(V_60,
shape=2.645224, scale = 10.218302, log = FALSE)
plot(a,
V_60, type="b")
h<-hist(weibullmm,breaks=15)
xhist<-c(min(h$breaks),h$breaks)
yhist<-c(0,h$density,0)
xfit<-seq(min(V_60),max(V_60),length=40)
yfit<-dnorm(xfit,mean=mean(V_60),sd=sd(V_60))
plot(xhist,yhist,type="s",ylim=c(0,max(yhist,yfit)),
main="Normal pdf and
histogram",
add=T)
lines(xfit,yfit,
col="red")
library(MASS)
set.seed(101)
my_data
<- rnorm(250, mean=1, sd=0.45) #
unkonwn distribution parameters
fit
<- fitdistr(my_data, densfun="normal") # we assume my_data ~ Normal(?,?)
fit
hist(my_data,
pch=20, breaks=25, prob=TRUE, main="")
curve(dnorm(x,
fit$estimate[1], fit$estimate[2]), col="red", lwd=2, add=T)
library(MASS)
set.seed(101)
my_data
<- rnorm(250, mean=1, sd=0.45) #
unkonwn distribution parameters
fit
<- fitdistr(V_60, densfun="normal") # we assume my_data ~ Normal(?,?)
fit
hist(V_60,
pch=20, breaks=10, prob=TRUE, main="")
curve(dnorm(x,
fit$estimate[1], fit$estimate[2]), col="red", lwd=2, add=T)
curve(dweibull(x,
2.93, 10.07, log=F),
col="blue", lwd=6, add=T) # optim fit
dweibull(x,
2.93, 10.07, log=F)
library(fitdistrplus)
fit_w <- fitdist(V_60, "weibull")
fitdistr(V_60,densfun=dweibull,start=list(scale=1,shape=2))##
fitting
#
WEIBULL ASSESMMENT WIND SPEED OF GOUBET
data=write.table(weibullmm,
"data.txt")
goubet<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
attach(goubet)
str(goubet)
names(goubet)
dir()
#sample<-
rweibull(V_60, shape=2.645224 , scale =
10.2183) +0
#sqrt(sum((sample-V_60)^2)/length(V_60))#
RMSE
#sum((emp-weib)^2)/length(emp) # MINIMIM SQUARE ERROR
#The
WEIBULL Distribution
kmm=(sd/mean(V_60))^-1.086
cmm=moy/gamma((1+(1/kmm)))
weibullmm=(kmm/cmm)*(V_60/cmm)^(kmm-1)*exp(-((V_60/cmm)^kmm))
#The
RAYLEIGH Distribution
#install.packages("VGAM")
#require(VGAM)
#drayleigh(x,
scale = 1, log = FALSE)
#ray
= ((pi*V_60)/
(2*(mean(V_60)^2))) * exp(-(pi/4)*((V_60/mean(V_60))^2))
rayleigh
= (pi/2) * (V_60/ (mean(V_60)^2)) * exp(
(-pi/4)* (V_60/mean(V_60))^2)
#The LOGNORMAL
Distribution
# log10(x) = ln(x) / ln(10), i.e. ln(x) = log10(x) * ln(10)
require(stats)
#dlnorm(x,
meanlog = 0, sdlog = 1, log = FALSE)
lognormal
= (1/(V_60*sd(log(V_60))*sqrt(2*pi) )) *(exp( -(log(V_60/mean(log(V_60)))
^2) / (2*(sd(log(V_60))^2 ))) )
#The
GAMMA Distribution
gamma=((
(V_60 ^(kmm-1)) / ( (cmm^kmm ) *
gamma(kmm) )) ) * (exp(-V_60/cmm))
#The
Burr distribution
#The
Frechet distribution
#The
Gumbel distribution
#The
extremum distribution
#hist(V_60,
prob=T)
a=hist(weibullmm,
prob=T); dev.off()
b=hist(lognormal,
prob=T); dev.off()
c=hist(rayleigh,
prob=T); dev.off()
d=hist(MSEF,
prob=T); dev.off()
e=hist(gamma,
prob=T); dev.off()
dat=cbind(a$density,b$density,c$density,d$density,e$density);
colnames(dat) <- c("weib", "log", "ray",
"mse","gamma"); dat
#col=terrain.colors(5)) or
col=topo.colors(500)
plot(-1,-1,xlim=c(0,22),
ylim=c(0,12),xlab="Wind speed(m/s)",ylab="Probability density
function (%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
par(new=T)
hist(V_60,
prob=T, col= "cornsilk",main=NA,
xlab=NA,ylab=NA,axes=FALSE,border="blue")
par(new=T)
plot(V_60,weibullmm,pch=3,cex=0.0,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=a$density
par(new=T)
curve(dweibull(x,kmm,
cmm), lty=1,lwd=2,axes=FALSE, col="red",add=T,xlab="",
ylab="")
require(VGAM)
par(new=T)
plot(V_60,rayleigh,pch=3,cex=0.0,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=c$density
par(new=T)
curve(drayleigh(x,
scale=7.2), lty=1,lwd=2,col="blue3", axes=FALSE, add=T,
xlab="", ylab="")
#curve(dweibull(x,2,cmm),
lty=1,lwd=2,axes=FALSE, col="blue",add=T,xlab="",
ylab="")
#par(new=T)
#curve(dgenray(x,
scale = cmm, shape=kmm, log = FALSE),
lty=1,lwd=2,col="orange", add=T)
#https://clayford.github.io/dwir/dwr_12_generating_data.html
#hist(rayleigh,
freq=FALSE)
X
<- seq(min(V_60),max(V_60), length=92065) # x
#Y
<- dnorm(X, mean = mean(rayleigh), sd = sd(rayleigh)) # f(x) = dnorm
# Check against the mean of the wind-speed distribution at Goubet?
# TODO: review the fit of this distribution (original note was garbled).
#par(new=T)
#lines(X,Y,type =
"l", col="orange", axes=FALSE, xlab=NA, ylab=NA) # plot
(x,y) coordinates as a "blue" line ("l")
#par(new=T)
#plot(X,Y,type =
"l", col="orange", axes=FALSE, xlab=NA, ylab=NA) # plot
(x,y) coordinates as a "blue" line ("l")
par(new=T)
plot(V_60,lognormal,pch=3,cex=0.0,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=b$density
par(new=T)
curve(dlnorm(x,
meanlog=2.17, sdlog=0.114, log=F),
lty=1,lwd=2,col="chartreuse3",add=T, axes=FALSE, xlab=NA, ylab=NA)
#par(new=T)
#curve(dlnorm(x,
meanlog = mean(log(V_60)), sdlog = sd(log(V_60)),log =T),
lty=1,lwd=2,col="green",add=T, axes=FALSE)
par(new=T)
plot(V_60,MSEF,pch=3,cex=0.0,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=d$density
par(new=T)
curve(dweibull(x,
kmm, cmm),axes=FALSE, lty=1,lwd=2,col="cyan",add=T, xlab=NA, ylab=NA)
par(new=T)
plot(V_60,gamma,pch=3,cex=0.0,col="red",axes=FALSE,add=T,ylab=NA,xlab=NA)
x=e$density
par(new=T)
curve(dweibull(x,
kmm, cmm),axes=FALSE, lty=1,lwd=2,col="yellow",add=T, xlab=NA,
ylab=NA)
plot(-1,-1,xlim=c(0,20),
ylim=c(0,20),xlab="",ylab="")
legend("topright",c(
"Weibull",
"Rayleigh",
"Lognormal",
"MSE-Weibull"),
lty=c(1,1,1,3),
col=c("red",
"blue3","chartreuse3","cyan"),
lwd=c(5,5,5,5), bty="n",
cex=1.8)
plot(-1,-1,xlim=c(0,20),
ylim=c(0,20),xlab="",ylab="")
legend("top",c("Vm
=9.08 m/s","sd = 3.71","skew = -0.08","kurt =
-0.65", "min =0.4m/s", "max = 20.8m/s" ),
cex=1.7,bg="white",box.col="black")
FCmm=1-exp(-((V_60/cmm)^kmm))
plot(V_60,
FCmm, pch=20, ylab="Cumulative distribution function",
xlab="Wind speed (m/s)")
grid()
par(new=TRUE)
#
MATHEMATIC AND PHYSIC ENERGY WIND SPEED ASSESMENT
rm(list=ls());
rm(list=ls()); rm(list=ls())
pd<-read.csv(file("clipboard"),header=T,sep="\t",
dec=",",row.names=1)
attach(pd);
str(pd); names(pd)
#
v2= goubet
#EXTRAPOLATION
OF WIND SPEED BY COEFFICIENT OF SHEAD
v1=v2;
h1= 40; h2= 100
alpha=
(0.37 -( 0.088 * log(v1) )) / (1-0.088 * log( h1/ 10))
v2
= v1*((h2/h1)^alpha)
#POWER
density
Yoboki=1.127139204;
Gobaad=1.159309632
PetitBara=1.173332799; Moulhoulé=1.157087117; KhorAngar=1.15
#air
density = 1.255 kg/m^3
PD
= 1/2 * (Yoboki)*(mean (v2)^3)
mean
(v2); PD
#PARAMETER
OF WEIBULL KC
kmm=(sd(v2)/mean(v2))^-1.086
; kmm
cmm=mean(v2)/gamma((1+(1/kmm)));
cmm
#CAPACITY
FACTOR INTERMITTENCE FACTOR
k=kmm;
c=cmm
vi=4; vr=12; vo=25;
Pout= exp(-((vi/c)^k))
- exp(-((vr/c)^k))
Pr=((vr/c)^k) -
((vi/c)^k)
CF = (Pout/Pr) -
exp(-(vo/c)^k); CF
#AIR
DENSITY ( gasparf=287j/kg.K with mean
monthly)
#robar=mean(pres)/(mean(temp)*287)
#robar
#Most
probable wind speed:
Vp= ((kmm-1)/kmm)^(1/kmm); Vp
#Ideal
wind speed :
Vid= cmm*(((kmm+2)/kmm )^(1/kmm)); Vid
#Kinetic
energy resulting:
KE
= 1/2* (1.225) * 6263 *
(V_60^3)
#Cumulative
distribution function (CDF):
F= 1- exp(-
((V_60/cmm)^kmm) )
plot(V_60,F,
type ="p", col="blue")
#
Or ecd()
#MOST
PROBABLE WIND SPEED
MPWS=cmm*( (1-(1/kmm))^(1/kmm) )
MPWS
#
MAXIMUM ENERGY CARRING WIND SPEED
MEWS=cmm*( (1-(2/kmm))^(1/kmm) )
MEWS
#POWER
density of weibull
PDw=
1/2 * PH*c^3 * gamma(1+ (3/k))
#ENERGY
density for a duration t
E=
PDw * t
#AIR
density/ gas constant =287 J/kg.K.
p=
average / (averagetemp * gasconstant)
#POWER
P actually produced at time t
Pt = Pr*
(vt²-vci²)/(vr²-vci²)
pr=1/2 * ph * A*CP*
Vr^3
# Si (vci
vt vr)
#Annual
energy production (AEP):
EOUT
= 8760 * sum cut-outàcut-in ( Fequency distribution *wind power)
#Energy
POWER output
E=
sum(Pi*T)
EWT=nbturbine
* PN*CF*365*24
#CAPACITY
FACTOR
CF = E/Pr * T* N
CF = EOUT/Er
#
ECONOMY and FINANCE
#
data=write.table(weibullmm, "data.txt")
#
ECONOMIC WIND SPEED ASSESMENT
#
CAPACITY FACTOR
# Weibull shape (k) and scale (c) for the V_60 wind-speed series,
# estimated by the empirical moment method.
k <- (sd(V_60) / mean(V_60))^-1.086
c <- mean(V_60) / gamma(1 + (1 / k))
# Turbine cut-in, rated and cut-out wind speeds (m/s).
vi <- 3; vr <- 13; vo <- 25
# Probability terms between cut-in and rated speed.
Pout <- exp(-((vi / c)^k)) - exp(-((vr / c)^k))
Pr <- ((vr / c)^k) - ((vi / c)^k)
# Capacity factor. BUG FIX: the cut-out term read exp(-(vo/k)^k); the
# Weibull survival term is exp(-(vo/c)^k), matching the identical
# capacity-factor formula earlier in this file.
CF <- (Pout / Pr) - exp(-((vo / c)^k)); CF
CF <- 0.43784975  # NOTE: immediately overwritten with a fixed value, as in the original
#PRESENT
VALUE COST: Ahmed (2012)/Renewable and
Sustainable Energy Reviews (12.110)
I=
1600*2500*40+(1600*2500*40*0.3)#Total investment cost (I)
COM=0.25*I #Annual operation and maintenance costs
T=
20 #lifetime of Turbine
i=0.024 #inflation rate
r=0.12 #interest rate
S=0.10
*I #scrap value
PVC = I
+ (COM *((1+i)/(r-i)))* (
1-((1+i)/(1+r))^T)- (S*((1+i)/(1+r))^T)
E
= 40*2500*365*24*20*CF # GWh/year
#
rbind(I,
COM, T, i, r, S, E, PVC)
LCOE1
= PVC
/ E ; LCOE1 # $/KWh
#AVERAGE
oF Levelized Cost of Wind Energy: (Alfawzan and all 2019)
CC=
1477 #Total capital cost per
unit: ($/KW)
IR=
0.024 #Inflation rate (%)
DR=
0.12 #Discount rate (%)
Dr= 0
#Debt ratio (%)
Dir= 0
#Debt interest rate (%)
Dt= 0
#Debt term (y)
Eev= 0
#Electricity export escalating rate
(%)
CRF=(DR*((1+DR)^T))/(((1+DR)^T)-1) #Capital recovery factor(__)
T=
20 #Time of operation (yr)
CFOM=58 #Fixed cost per unit of power: ($/KW/yr )
CVOM=(2.5/100) #Variable per unit of Energy (C$ /KWh )
CF
= 0.4378496
#
rbind(CC,IR,
DR, CRF, T, CFOM, CVOM, CF)
LCOE2 =(((CC * CRF)
+ CFOM)/ (CF*8760)) + (CVOM/(1)); LCOE2
#Levelized
Cost of Wind Energy: (Razaei and all 2020)
#
ASSESMENT FINANCIAL AND RISK
#https://www.homerenergy.com/products/pro/docs/latest/system_fixed_operations_and_maintenace_om_cost.html
#
REAL DISCOUNT RATE
#rdr=
nomdr-inf / 1+inf #0.12
#r=read.table(file("clipboard"),
header=T, sep="\t", dec=",", row.names=1)
#attach(r);
names(r)
library(prettyR);
describe(r)
#
Intervalle de Confiance 95% 1.96
#a=mean(cost)-1.29*sd(cost)/sqrt(length(cost))
#b=mean(cost)+1.29*sd(cost)/sqrt(length(cost))
#library(binom)
#binom.confint(269,405,method="exact")
#
# MONTE CARLO SIMULATION
#Des informations de
probabilités pour UNE SIMULATION MONTE CARLO(echantillonage des probabilités)
#INVEST
COST
#CCs=runif(10000,
min=(CC-(CC*0.3)), max=(CC+(CC*0.3)))
# $/KW
IC=1600;
IC
ICs=runif(1000000,
min=700, max=1600)
sIC
<- rlnorm(ICs, mean = mean(log(ICs)), sd = sd(log(ICs)) )
x11()
#
ggplot Histogramme
hp<-qplot(x
=sIC, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
hist(sIC
, prob=T, xalb=NA, ylab=NA, main=NA, col="blue3", breaks=100)
plot(-1,-1,xlim=c(500,3500),
ylim=c(0,0.0015),xlab="$/KW",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
#grid(NULL, NA)
par(new=T)
plot(-1,-1,xlim=c(500,3500),
ylim=c(0,0.0015),xlab="$/KW",ylab="Probability density function
(%)")
par(new=T) #"FD"
{hist(sIC
, prob=T,border="blue", breaks=100,col=cm.colors(500),
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sIC),
col='red', lty=1, lwd=1)
par(new=T);plot(ecdf(sIC),axes=NA,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=2, col="yellow2")
#
https://stat.ethz.ch/R-manual/R-devel/library/stats/html/ecdf.html
plot(-1,-1,xlim=c(500,3500),
ylim=c(0,0.0015),xlab="$/KW",ylab="Probability density function
(%)")
legend("topright",c("Capital
Cost (K/KW)"),text.font=c(2),fill=c(cm.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='blue3',
lty=5, lwd=5, bty="n" )
#legend(0.0047,480,c("Quantiles
Adult","Quantiles Children"),text.font=c(2,2),
lwd=c(2,2),lty=c(3,1),bty="n")
#
FOUNDATION COST
FDC=0.3*IC
sFDC=runif(1000000,
min=(FDC-(FDC*0.3)), max=(FDC+(FDC*0.3))
)
#
ggplot Histogramme
hp<-qplot(x
=sFDC, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
x11()
hist(sFDC,
prob=T, xalb=NA, ylab=NA, main=NA)
plot(-1,-1,xlim=c(0,15),
ylim=c(0,20),xlab="%",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
par(new=T)
plot(-1,-1,xlim=c(0,15),
ylim=c(0,20),xlab="%",ylab="Probability density function
(%)")
par(new=T)
{hist(sFDC
, prob=T,border="blue3",
breaks="FD",col="yellow",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
# BUG FIX: this is the foundation-cost (sFDC) figure, but the density
# overlay referenced sITR, which is not defined until the interest-rate
# section later in the script and would error here.
lines(density(sFDC),
      col='purple', lty=5, lwd=2)
par(new=T);plot(ecdf(sFDC),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(0,15),
ylim=c(0,20),xlab="%",ylab="Probability density function
(%)")
legend("topright",c("Foundation
Cost ($) "),text.font=c(2),fill=c(topo.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='purple',
lty=5, lwd=1, bty="n" )
#
COST O&M
#25%
all year OR 6% per year
#COM=0.06*CC;
COM
#COMs
= runif(10000, min=10.28, max=60.00)
#sCOM
<- rnorm(COMs, mean = mean(COMs), sd = sd(COMs))
# $/KW/YR
CF=runif(1000000,
min=10.28, max=60.00)
# Return the statistical mode (most frequent value) of vector v.
# Ties are broken in favour of the value that appears first in unique(v).
getmode <- function(v) {
  distinct <- unique(v)
  counts <- tabulate(match(v, distinct))
  distinct[which.max(counts)]
}
require(EnvStats)
CFs = runif(1000000,
min=10.28, max=60.00)
sCF <- rtri(CFs,
min = min(CFs), max = max(CFs), mode=getmode(CFs))
#
ggplot Histogramme
hp<-qplot(x
=sCF, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
x11()
hist(sCF
, prob=T, xalb=NA, ylab=NA, main=NA)
plot(-1,-1,xlim=c(10,60),
ylim=c(0,0.04),xlab=" $/KW/YR",ylab="Probability density
function (%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
#grid(NULL, NA)
par(new=T)
plot(-1,-1,xlim=c(10,60),
ylim=c(0,0.04),xlab=" $/KW/YR",ylab="Probability density
function (%)")
par(new=T)
{hist(sCF
, prob=T,border="orange",
breaks="FD",col=terrain.colors(500),
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sCF),
col='green', lty=5, lwd=5)
par(new=T);plot(ecdf(sCF),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(10,60),
ylim=c(0,0.04),xlab="$/KW",ylab="Probability density function
(%)")
legend("topright",c("Fixed
O&M cost ($/KW) "),text.font=c(2),fill=c(terrain.colors(500)),
bty="n" )
legend("right",c("Density"),text.font=c(2),col='green',
lty=5, lwd=5, bty="n" )
# $/MWh
CV=runif(1000000,
min=4.82, max=23.00)
CVs
= runif(1000000, min=4.82, max=23.00)
sCV
<- rlnorm(CVs, mean = mean(log(CVs)),
sd = sd(log(CVs)) )
#
ggplot Histogramme
hp<-qplot(x
=sCV, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
x11()
hist(sCV
, prob=T, xalb=NA, ylab=NA, main=NA)
plot(-1,-1,xlim=c(0,80),
ylim=c(0,0.06),xlab="$/MWh",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
par(new=T)
plot(-1,-1,xlim=c(0,80),
ylim=c(0,0.06),xlab="$/MWh",ylab="Probability density function
(%)")
par(new=T)
{hist(sCV
, prob=T,border="orange", breaks="FD",col=topo.colors(500),
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sCV),
col='purple', lty=5, lwd=2)
par(new=T);plot(ecdf(sCV),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(0,80),
ylim=c(0,0.06),xlab="$/MWh",ylab="Probability density function
(%)")
legend("topright",c("Variable
O&M cost ($/MWh) "),text.font=c(2),fill=c(topo.colors(500)),
bty="n" )
legend("right",c("Density"),text.font=c(2),col='purple',
lty=5, lwd=5, bty="n" )
#
The Exponential Distribution
require(stats)
dexp(x,
rate = 1, log = FALSE)
pexp(q,
rate = 1, lower.tail = TRUE, log.p = FALSE)
qexp(p,
rate = 1, lower.tail = TRUE, log.p = FALSE)
rexp(n,
rate = 1)
#FINANCIAL
INPUT
# Discount Rate
%
DR=9
DRs=runif(1000000,
min=(DR-(DR*0.3)), max=(DR+(DR*0.3)))
sDR
<- rnorm(DRs, mean = mean(DRs), sd = sd(DRs))
#
ggplot Histogramme
hp<-qplot(x
=sDR, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
hist(sDR
, prob=T, xalb=NA, ylab=NA, main=NA)
plot(-1,-1,xlim=c(0,80),
ylim=c(0,0.06),xlab="$/MWh",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
#grid(NULL, NA)
par(new=T)
plot(-1,-1,xlim=c(0,80),
ylim=c(0,0.06),xlab="$/MWh",ylab="Probability density function
(%)")
par(new=T)
{hist(sDR
, prob=T,border="yellow", breaks="FD",col="blue",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sDR),
col='red', lty=5, lwd=2)
par(new=T);plot(ecdf(sDR),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(0,80),
ylim=c(0,0.06),xlab="$/MWh",ylab="Probability density function
(%)")
legend("topright",c("
Discount Rate (%)
"),text.font=c(2),fill=c(topo.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='red',
lty=5, lwd=5, bty="n" )
# Interest Rate
%
ITR=12
ITRs=runif(1000000,
min=(ITR-(ITR*0.3)), max=(ITR+(ITR*0.3))
)
sITR
<- rnorm(ITRs, mean = mean(ITRs), sd = sd(ITRs))
#
ggplot Histogramme
hp<-qplot(x
=sITR, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
x11()
hist(sITR
, prob=T, xalb=NA, ylab=NA, main=NA)
plot(-1,-1,xlim=c(0,15),
ylim=c(0,20),xlab="%",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
par(new=T)
plot(-1,-1,xlim=c(0,15),
ylim=c(0,20),xlab="%",ylab="Probability density function
(%)")
par(new=T)
{hist(sITR
, prob=T,border="blue3",
breaks="FD",col="yellow",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sITR),
col='purple', lty=5, lwd=2)
par(new=T);plot(ecdf(sITR),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(0,15),
ylim=c(0,20),xlab="%",ylab="Probability density function
(%)")
legend("topright",c("Interest
Rate (%)
"),text.font=c(2),fill=c(topo.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='purple',
lty=5, lwd=1, bty="n" )
# Inflation Rate %
IFR=2.4
IFRs=runif(1000000,
min=(IFR-(IFR*0.3)), max=(IFR+(IFR*0.3)))
sIFR
<- rnorm(IFRs, mean = mean(IFRs), sd = sd(IFRs))
#
ggplot Histogramme
hp<-qplot(x
=sIFR, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
x11()
hist(sIFR
, prob=T, xalb=NA, ylab=NA, main=NA)
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="$/MWh",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
#grid(NULL, NA)
par(new=T)
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="%",ylab="Probability density function
(%)")
par(new=T)
{hist(sIFR
, prob=T,border="red", breaks="FD",col="red",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sIFR),
col='red', lty=5, lwd=2)
par(new=T);plot(ecdf(sIFR),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="$/MWh",ylab="Probability density function
(%)")
legend("topright",c("Inflation
Rate (%) "),text.font=c(2),fill=c(topo.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='red',
lty=5, lwd=3, bty="n" )
#POTENTIAL INPUT
#Capacité de facteur %
sFCF=runif(1000000,
min=26, max=52)
hist(sFCF
, prob=T, xalb=NA, ylab=NA, main=NA)
x11()
#
ggplot Histogramme
hp<-qplot(x
=sFCF, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
plot(-1,-1,xlim=c(25,50),
ylim=c(5,15),xlab="$/MWh",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
par(new=T)
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="%",ylab="Probability density function
(%)")
par(new=T)
{hist(sIFR
, prob=T,border="red", breaks="FD",col="red",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sIFR),
col='red', lty=5, lwd=2)
par(new=T);plot(ecdf(sFCF),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="$/MWh",ylab="Probability density function
(%)")
legend("topright",c("Capacité
de facteur(%) "),text.font=c(2),fill=c(topo.colors(500)),
bty="n" )
legend("right",c("Density"),text.font=c(2),col='red',
lty=5, lwd=3, bty="n" )
#Durée de vie yr
sLFT=runif(1000000,
min=15, max=20)
hist(sLFT
, prob=T, xalb=NA, ylab=NA, main=NA)
x11()
#
ggplot Histogramme
hp<-qplot(x
=sLFT, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="yr",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
#grid(NULL, NA)
par(new=T)
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="%",ylab="Probability density function
(%)")
par(new=T)
{hist(sLFT
, prob=T,border="red", breaks="FD",col="red",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sLFT),
col='red', lty=5, lwd=2)
par(new=T);plot(ecdf(sLFT),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="$/MWh",ylab="Probability density function
(%)")
legend("topright",c("Life
Time (yr) "),text.font=c(2),fill=c(topo.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='red',
lty=5, lwd=3, bty="n" )
#Degradation de la
production %
DGs=runif(1000000,
min=0, max=5)
sDG
<- rlnorm(DGs, mean = mean(log(DGs)),
sd = sd(log(DGs)) )
h=hist(sDG
, prob=T, xalb=NA, ylab=NA, main=NA)
x11()
#
ggplot Histogramme
hp<-qplot(x
=sDG, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
plot(-1,-1,
xlim=c(5,50),ylim=c(0,0.8),xlab="%",ylab="Probability density
function (%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
#grid(NULL, NA)
par(new=T)
plot(-1,-1,
xlim=c(5,50),ylim=c(0,0.8),xlab="%",ylab="Probability density
function (%)")
par(new=T)
{hist(sDG
, prob=T,border="red", breaks='FD',col="red",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sDG),
col='red', lty=5, lwd=2)
par(new=T);plot(ecdf(sDG),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,
xlim=c(5,50),ylim=c(0,0.8),xlab="%",ylab="Probability density
function (%)")
legend("topright",c("Degrdataion
(%) "),text.font=c(2),fill=c(topo.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='red',
lty=5, lwd=3, bty="n" )
#Life-cycle
GHG emissions(g CO2eq/kWh)
sGHG=runif(1000000,
min=8.4, max=20)
hist(sGHG
, prob=T, xalb=NA, ylab=NA, main=NA)
x11()
#
ggplot Histogramme
hp<-qplot(x
=sGHG, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +
theme_linedraw() + theme_light()
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="$/MWh",ylab="Probability density function
(%)")
grid(col="darkgrey",
lty="solid", lwd=0.1)
#grid(NULL, NA)
par(new=T)
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="%",ylab="Probability density function
(%)")
par(new=T)
{hist(sIFR
, prob=T,border="red", breaks="FD",col="red",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sIFR),
col='red', lty=5, lwd=2)
par(new=T);plot(ecdf(sGHG),axes=FALSE,
xlab=NA, ylab=NA, add=TRUE, main=NA, lwd=5, col="yellow2")
plot(-1,-1,xlim=c(0,20),
ylim=c(5,15),xlab="$/MWh",ylab="Probability density function
(%)")
legend("topright",c("Inflation
Rate (%) "),text.font=c(2),fill=c(topo.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='red',
lty=5, lwd=3, bty="n" )
#|
SECOND STEPS OF MC LCOE
#
ceRtainty: Certainty Equivalent in R
#install.packages("ceRtainty")
library(ceRtainty)
data("profitSWG");
profitSWG
summary(profitSWG);
names(profitSWG)
dim(profitSWG);
str(profitSWG)
# RAC must be a relative or absolute value:
#Obtaining
the CE table
##
Computing the CE values, for a RAC range of 0.5-4.0, and Power utility
function.
certainty(data
= profitSWG, ival = 0.5, fval = 4, utility = "Power")$CE_values
#
Obtaining the RAC vector
certainty(data=profitSWG,ival=.5,fval=4,utility="Power")$RAC
##
Performing the CE plot
certainty(data=profitSWG,ival=.5,fval=4,utility="Power")$CE_plot()
#prime de risque est
une mesure à comparer entre les EC
#
Computing and storing the CE values using Power utility function
ces <- certainty(data = profitSWG, ival
= 0.5, fval = 4, utility = "Power")
ces_values <- ces$CE_values # store CE table
ces_rac <- ces$RAC # store RAC vector
#
Computing the RP values respect to SERENADE treatment
premium(tbase
= "serenade",ce_data = ces_values, rac = ces_rac, utility =
"Power")$PremiumRisk
#
Computing the RP values in percentage respect to SERENADE treatment
premium(tbase
= "serenade",ce_data = ces_values, rac = ces_rac, utility =
"Power")$PremiumRiskPer100
#
Plotting the RP absolute values
premium(tbase
= "serenade",ce_data = ces_values, rac = ces_rac, utility =
"Power")$RP_plot()
#Generating
Risk Aversion Coefficients
rac_generator(data
= profitSWG$control, ini = 0.5, fin = 4.0)
#END MC LCOE
#col=terrain.colors(5))
col=topo.colors(500)
#sIC sFDC
sCF sCV sDR
sITR sIFR sCFC
sLFT sDG sGHG
IC=1600; FD=0.3*1600;
CRF= 0.11248912; FC = 60; T=8760;
CF = 0.43784975 ;
CV=23/1000
LCOE =((((IC+(FD))*CRF
)+FC) / (T*CF)) + (CV) ; LCOE
sCRF=((sDR/100)*(((sDR/100)+1)^20))/
((((sDR/100)+1)^20 )-1)
sLCO
=( (((sIC)+ (sFDC)) * (sCRF)) + (sCF)
)/ (8760*(sFCF/100)) #*sDG without
sLCOE=
sLCO+(sCV/1000)
head(sLCOE);
tail(sLCOE)
summary(sLCOE)
#View(sLCOE)
hist(sLCOE
, prob=T, xalb=NA, ylab=NA, main=NA)
x11()
#
ggplot Histogramme
hp<-qplot(x
=sLCOE, fill=..count.., geom="histogram");
hp+scale_fill_gradient(low="red",
high="green")+theme_classic() + theme_linedraw() # +geom_density(alpha=.2, fill="#FF6666") +theme_linedraw() +theme_light()
plot(-1,-1,xlim=c(0.025,0.17),
ylim=c(0,22),xlab="Wind LCOE ($/KWh)",ylab="Probability density
function (%)")
grid(col="grey2",
lty="solid", lwd=0.1)
#grid(NULL, NA)
par(new=T)
plot(-1,-1,xlim=c(0.025,0.17),
ylim=c(0,22),xlab="Wind LCOE ($/KWh)",ylab="Probability density
function (%)")
#breaks="FD"
par(new=T)
{hist(sLCOE,add=TRUE,
prob=T,border="yellow", breaks=100,col="blue3",
main=NA,xlab=NA,ylab=NA,axes=FALSE)}
lines(density(sLCOE),
col='red', lty=1, lwd=1)
#
plot(-1,-1,xlim=c(0.025,0.17),
ylim=c(0,22),xlab="Wind LCOE ($/KWh)",ylab="Probability density
function (%)")
legend("topright",c("Degrdataion
(%) "),text.font=c(2),fill=c(topo.colors(500)), bty="n" )
legend("right",c("Density"),text.font=c(2),col='red',
lty=5, lwd=3, bty="n" )
#library(wesanderson);
names(wes_palettes)
#wes_palette("Zissou1",
10, type = "continuous")) #heat.colors(n), terrain.colors(n),
topo.colors(n), et cm.colors(n)
#hist(sLCOE,xlim=c(0,0.10),
ylim=c(0,75),breaks=170, prob=T, main=NA,xlab=" LCOE ($/KW) ", ylab=NA, col=
heat.colors(70),border="darkslategrey")
#require(lattice)
#histogram(~sLCOE,xlim=c(0,0.10),breaks=170,
prob=T, main=NA,xlab=" LCOE ($/KW)
", ylab=NA, col= heat.colors(70),border="darkslategrey")
cbind(quantile(sLCOE,c(0.05,0.50,0.95)))
numb1
<- cbind(quantile(sLCOE,c(0.05,0.50,0.95)));
library(scales);scientific(numb1, digits = 3)
e=
0.05442740
abline(v=e,
lwd=0.5, col="darkred",lty=2)
#legend(e, 70 ,"5%=
3.718e-4",cex=0.75,bg="white")
#
#f=
0.04808734
#abline(v=f,
lwd=0.5, col="darkblue",lty=2)
#legend(f, 70
,"50%= 2.342e-3"
,cex=0.75,bg="white")
#
g=0.11629905
abline(v=
g, lwd=0.5, col="darkgreen",lty=2)
#legend(g,
70
,"95%=4.314e-3",cex=0.75,bg="white")
cbind(round(summary(sLCOE),4))
round(sd(sLCOE),4)
#https://clayford.github.io/dwir/dwr_12_generating_data.html
#q
<- qnorm(p = 0.15, mean = 100, sd = 5)
#xx
<- c(seq(85,q,length.out = 100),rev(seq(85,q,length = 100)))
#yy
<- c(rep(0,100),dnorm(rev(seq(85,q,length = 100)), mean = 100, sd = 5))
#polygon(x=xx,y=yy,col="grey")
#
annotate graph
#text(x
= 93, y = 0.005,labels = pnorm(q,mean = 100,sd = 5))
#text(x
= q, y = 0.06, labels = round(q,2))
sLCOE =(((sCC * CRF) + sCFOM)/ (sCF*8760)) + (sCVOM/(1)); sLCOE
h=hist(sLCOE,
breaks='FD', freq=F)
#'FD'/text(h$mids,h$counts,labels=h$counts,
adj=c(0.5,-0.5))
plot(-1,-1
, xlim=c(0.04,0.27), ylim=c(0,25), xlab="LCOE ($/KWh)",
ylab="Frequency (%)")
grid(lty=1,
col="grey")
par(new=TRUE)
#hist(sLCOE,
breaks='FD',axes=F,
freq=F,main=NA,xlab=NA,ylab=NA,col="blue",border="red")
plot(h$mids,
h$density, , type="h", axes=FALSE,
lwd=3,col="blue3", xlab=NA, ylab=NA)
# BUG FIX: `LCOEs` is never defined; the simulated LCOE vector built
# above is `sLCOE`.
lines(density(sLCOE),
      lwd=2, lty=1, col='red')
require(actuar)
quantile(sLCOE)
quantile(sLCOE, probs
= seq(0.05, 0.9, 0.01))
q=cbind(quantile(sLCOE,c(0.1,0.9)))
library(scales);scientific(q,
digits = 3)
a=0.07632508;
b=0.11637282
abline(v=a,
lwd=2, col="darkred",lty=1)
abline(v=b,
lwd=2, col="darkblue",lty=1)
legend(a, 25 ,
"10%= 0.07632508",cex=0.75,bg="white")
legend(b, 25
,"90%=0.11637282"
,cex=0.75,bg="white")
d10=sLCOE[sLCOE>=a]
d90=d10[d10<=b]
d=data.frame(d90)
write.table(d,
"data.txt")
r=read.table(file("clipboard"),
header=T, sep="\t", dec=",", row.names=1)
h=hist(r$LCOE,
freq=F, breaks=20)
plot(-1,-1
, xlim=c(0.04,0.27), ylim=c(0,25),xaxt="n", xlab="LCOE
($/KWh)", ylab="Frequency (%)")
grid(lty=1,
col="grey")
par(new=TRUE)
plot(h$mids,
h$density, type="h", axes=F, xaxt="n",
lwd=3,col="blue3", xlab=NA, ylab=NA)
#xaxt="n"
axis(1,
at=c(0.077,0.079,0.081, 0.083, 0.085, 0.087, 0.089, 0.091, 0.093, 0.095, 0.097,
0.099,
0.101, 0.103, 0.105,0.107, 0.109,
0.111, 0.113, 0.115 ),las=1, cex=.9)
barplot(h$density~h$mids,
col=cm.colors(14))
d=data.frame(h$mids,
h$density)
write.table(d,
"data.txt", sep="\t")
#RISK
AVERSION, RISK PREMIUM AND CERTAINTY EQUIVALENT
d=cbind(sLCOE);
e=cbind(sLCOE); f=cbind(sLCOE)
dd=data.frame(d[1:10000,])
library(ceRtainty)
certainty(data
= dd, ival = 0.5, fval = 4, utility = "Power")$CE_values
certainty(data=dd,ival=.5,fval=4,utility="Power")$RAC
cesdd <- certainty(data = dd, ival = 0.5,
fval = 4, utility = "Power")
ces_values <- cesdd$CE_values # store CE table
ces_rac <- cesdd$RAC # store RAC vector
#Generating
Risk Aversion Coefficients
rac_generator(data
= dd, ini = 0.5, fin = 4.0)
View(rac_generator(data
= dd, ini = 0.5, fin = 4.0))
AR=-5.158836 # HIGHER = FIRST VALUE OF ITERATION PRATT1964
RP
= (sum((d[1:10000,])/10000)) - ( (sum(
(d[1:10000,]^(1-AR)/(1-AR))*(1-AR))/(10000))^(1-AR) );RP
Ceq=
(sum((d[1:10000,])/10000)) +RP; Ceq
# --- Newton-Raphson Method ---
# WARNING(review): clears the entire workspace, discarding every simulation
# result above; kept from the original script, but consider removing.
rm(list = ls())

# Find a root of f by Newton-Raphson with a forward-difference derivative.
# f   : function of one numeric variable
# tol : stop when successive iterates differ by less than tol
# x0  : starting value
# N   : maximum number of iterations
# Returns the vector of iterates (last element = root approximation).
# BUG FIX: the original had `newton` and `<- function(...)` on separate
# lines (a wrap artifact), which is a syntax error in R.
newton <- function(f, tol = 1e-7, x0 = 1, N = 100) {
  h <- 1e-7          # step for the numerical derivative
  i <- 1
  x1 <- x0
  p <- numeric(N)    # preallocate iterate storage
  while (i <= N) {
    df.dx <- (f(x0 + h) - f(x0)) / h
    x1 <- x0 - f(x0) / df.dx
    p[i] <- x1
    i <- i + 1
    if (abs(x1 - x0) < tol) break
    x0 <- x1
  }
  return(p[1:(i - 1)])
}
## End of the function
# USES NEWTON-RAPHSON METHOD
# Annuity equation: present value of 20 annual payments of 42,191,206.73 at
# rate x, minus the 208,000,000 investment. Root = internal rate of return.
f <- function(x) {
  42191206.73 * ((1 + x)^20 - 1) / (x * (1 + x)^20) - 208000000
}

h <- 1e-7
# Forward-difference derivative of f (reads the global h).
df.dx <- function(x) (f(x + h) - f(x)) / h
df.dx(1); df.dx(2)

app <- newton(f, x0 = 0.1); app
f(app[length(app)])  # residual at the approximated root
# y=0.1973083
# +(-208000000/((1+x)^0))
# BUG FIX: the scratch expression below was left uncommented in the original
# and errored on the unbound `x`; kept for reference as a comment:
# 42191206.73 *(( ( (1+x)^20 ) )-1)/(x*( (1+x)^20 ) )-208000000
# METHOD 2: NEWTON-RAPHSON / uniroot
# Discounted single payment: 42,191,206.73 discounted over 20 years at rate
# x, minus 7,280,000.
func2 <- function(x) {
  (42191206.73 * ((1 + x)^(-20))) - 7280000
}

curve(func2, xlim = c(-5, 5), col = "blue", lwd = 2, lty = 1, ylab = "f(x)")
abline(h = 0); abline(v = 0)

# BUG FIX: the original searched c(2, 3), where func2 has the same (negative)
# sign at both endpoints, so uniroot() failed. The root (~0.0918, confirmed
# by the check below) lies in (0, 3).
uniroot(func2, c(0.001, 3))

(42191206.73 * ((1 + 0.09182876)^(-20))) - 7280000  # ~0: verifies the root
# Newton-Raphson using numDeriv::genD for the derivative.
# f    : function whose root is sought
# a, b : lower / upper bounds (checked for exact roots; iteration starts at a)
# tol  : convergence tolerance on successive iterates
# n    : maximum number of iterations
# Returns a (or b) if it is an exact root, otherwise a list with the root
# approximation and all iterates; prints a message if it fails to converge.
newton.raphson <- function(f, a, b, tol = 1e-5, n = 1000) {
  require(numDeriv) # Package for computing f'(x)
  x0 <- a           # Set start value to supplied lower bound
  k <- n            # Initialize for iteration results
  # Check the upper and lower bounds to see if approximations result in 0
  fa <- f(a)
  if (fa == 0.0) {
    return(a)
  }
  fb <- f(b)
  if (fb == 0.0) {
    return(b)
  }
  for (i in 1:n) {
    dx <- genD(func = f, x = x0)$D[1]  # First-order derivative f'(x0)
    x1 <- x0 - (f(x0) / dx)            # Calculate next value x1
    k[i] <- x1                         # Store x1
    # Once the difference between x0 and x1 becomes sufficiently small,
    # output the results.
    if (abs(x1 - x0) < tol) {
      root.approx <- tail(k, n = 1)
      res <- list('root approximation' = root.approx, 'iterations' = k)
      return(res)
    }
    # If Newton-Raphson has not yet reached convergence, set x1 as x0 and
    # continue
    x0 <- x1
  }
  print('Too many iterations in method')
}

newton.raphson(func2, 0.001, 3)
# https://rstudio-pubs-static.s3.amazonaws.com/205225_5991ce162f504576a84eac9c659aeaaf.html
# Or: Excel TRI() method on the cash flow (in - out) from 0 to n
# --- TAX AND INCENTIVE STRATEGY OF DJIBOUTI ---
# EXCEL DETAILS: SHOW IST AWALEH
# Net = benefit minus cost
# NPVC AND NPVB (#PVC)

# PVC: present value of costs
I <- 1600 * 2500 * 40 + (1600 * 2500 * 40 * 0.3)  # investment (+30% margin)
COM <- 0.25 * I   # O&M cost share of the investment
# WARNING(review): `T = 20` shadows the built-in TRUE shorthand `T`; later
# calls such as read.table(..., header=T) silently receive 20. Kept for
# compatibility, but prefer a dedicated name (e.g. life_yrs).
T <- 20           # project lifetime in years
S <- 0.10 * I     # salvage value
ifl <- 0.024      # inflation rate
ir <- 0.12        # interest rate
# E = 40*2500*365*24*20*CF
PVC <- I + (COM * ((1 + ifl) / (ir - ifl))) * (1 - ((1 + ifl) / (1 + ir))^20) -
  (S * ((1 + ifl) / (1 + ir))^20)
PVC
# https://sci-hub.st/10.1115/POWER-ICOPE2017-3675
# METHODOLOGY I: payback of Rezaei et al. (2020) [Renewable Energy]
# Knowing the purchase tariff and the first-year accumulated energy output
# gives the benefit; knowing all costs accumulated by year gives the cost;
# the intersection of the two curves gives the payback time.
# METHODOLOGY II:
# https://www.dolcera.com/wiki/images/Wind_power_energy.
Infl <- 0.024  # inflation rate
Int <- 0.12    # nominal interest rate
# BUG FIX: the trailing "- 1" had been split onto its own line by a bad line
# wrap, so Disc was the raw ratio rather than the real (Fisher) discount rate.
Disc <- ((1 + Int) / (1 + Infl)) - 1
# Equivalent Uniform Annual Benefit: renewable-energy purchase tariff
# (Iran pays ~0.13 $/kWh)
EUAB <- 0.11
# https://www.afdb.org/fileadmin/uploads/afdb/Documents/Evaluation-Reports-_Shared-With-OPEV_/Ethiopie-Djibouti-Interconnection_%C3%A9lectrique-RAP-06-10-2011.pdf
# --- CUMULATIVE CASH FLOW ---
# Reads the cash-flow table from the clipboard (tab-separated, decimal comma).
r <- read.table(file("clipboard"), header = TRUE, sep = "\t", dec = ",")
str(r)
attach(r)  # NOTE(review): attach() is fragile; prefer r$col access
names(r)
# Real discount rate per row. NOTE(review): `r` here is the whole data frame
# and `i` comes from the attached columns; presumably interest/inflation
# columns - verify against the clipboard layout.
DRc <- ((1 + r) / (1 + i)) - 1
DRc
# C=(1477*100000*CRF)+ (58*100000)+(0.025*100000*8760); C
PVC <- 718094394   # present value of costs
PVB <- 0.11 * (E)  # present value of benefits (tariff x energy E)
CF <- PVB - PVC    # net cash flow
NPV <- CF / ((1 + DRc)^t)
# https://www.journaldev.com/39620/how-to-create-an-area-plot-in-r-using-ggplot2#:~:text=The%20area%20graphs%20are%20the,points%20using%20the%20ggplot2%20package.
library(ggplot2)
library(hrbrthemes)
# Reading data into data frames (Y and CCF come from the cash-flow section)
data <- data.frame(Y, CCF)
xdata <- Y
ydata <- CCF
# Plot the area chart with theme, title and labels
ggplot(data, aes(x = xdata, y = ydata)) +
  geom_area(fill = '#142F86', alpha = 1) +
  geom_line(color = 'skyblue', size = 1) +
  geom_point(size = 1, color = 'blue') +
  # BUG FIX: the title string had been split across lines by a wrap,
  # embedding a newline in the middle of the text.
  ggtitle("Area plot using ggplot2 in R") +
  labs(x = 'Value', y = 'frequency') +
  theme_ipsum()
# Change the line color by group
ggplot(df, aes(x = weight, color = sex)) +
  geom_density()
# Add the group means
p <- ggplot(df, aes(x = weight, color = sex)) +
  geom_density() +
  geom_vline(data = mu, aes(xintercept = grp.mean, color = sex),
             linetype = "dashed")
p
sp3 +
  theme(
    # Hide the panel borders and remove the grid lines
    panel.border = element_blank(),
    panel.grid.major = element_blank(),
    panel.grid.minor = element_blank(),
    # Change the axis line
    axis.line = element_line(colour = "black")
  )
# http://www.sthda.com/french/wiki/ggplot2-courbe-de-distribution-guide-de-demarrage-rapide-logiciel-r-et-visualisation-de-donnees
r <- read.table(file("clipboard"), header = TRUE, sep = "\t", dec = ",")
str(r); attach(r); names(r)
# NOTE(review): the scatter version of p is immediately overwritten by the
# line version below; kept as in the original.
p <- ggplot(r, aes(x = m, y = ws, color = ws)) +
  geom_point(color = "blue", size = 3)
p <- ggplot(r, aes(x = m, y = ws, color = ws)) +
  geom_line(size = 1.3)
p + theme_bw() + scale_color_gradientn(colours = rainbow(5))
# ggplot(ToothGrowth, aes(x=dose, y=len)) + geom_boxplot()
# hp <- qplot(x=x, fill=..count.., geom="histogram")
# hp + scale_fill_gradient(low="blue", high="red")
# http://www.sthda.com/french/wiki/ggplot2-couleurs-changer-les-couleurs-automatiquement-et-manuellement-logiciel-r-et-visualisation-de-donnees
# https://www.datanovia.com/en/fr/blog/ggplot-background-du-theme-couleur-et-quadrillage/
# --- SENSITIVITY ANALYSIS BY GRAPH ---
# Inputs: sIC sFDC sCF sCV sDR sITR sIFR sCFC sLFT sDG sGHG
# --- RANK SPEARMAN SENSITIVITY ---
dc <- data.frame(sIC, sFDC, sCF, sCV, sDR, sITR, sIFR, sFCF, sLFT,
                 sDG, sGHG, sCRF, sLCOE)
# Spearman rank correlation of every input column against the simulated LCOE
sp <- cor(dc, dc$sLCOE, method = "spearman")
# method="spearman", alternative="two.sided"
sp
# Rank-correlation coefficients of each input against LCOE (presumably
# copied from the `sp` output above - verify).
cIC  <-  0.1232932641
cFDC <-  0.0388737283
cCF  <-  0.0455150223
cCV  <-  0.1903809174
cDR  <-  0.0852305248
cITR <- -0.0010613671
cIFR <- -0.0004500938
cFCF <- -0.1794478264
cLFT <-  0.0022432644
cDG  <- -0.9347707838
sCRF <-  0.0852305248  # NOTE(review): name breaks the c* pattern; likely cCRF
cGHG <-  0.0004282596
# --- COLOR RAMP FOR THE RANK-CORRELATION BAR CHART ---
colfunc <- colorRampPalette(c("red", "yellow", "springgreen", "royalblue"))
# Empty frame fixing the x-range, then overlay the horizontal bars.
plot(-100, 100, xlim = c(-10, 10), xlab = "Rank Correlation",
     yaxt = "n", xaxt = "n", ylab = NA)
par(new = TRUE)
barplot(c(cIC, cFDC, cCF, cCV, cDR, cITR, cIFR, cFCF, cLFT, cDG, cGHG),
        horiz = TRUE, col = colfunc(150))
grid(NULL, NA, col = "gray", lty = "dotted", lwd = par("lwd"),
     equilogs = TRUE)  # NULL OR NA OR c()
# --- PLOT LINE SENSITIVITY ---
# Median of each simulated input: the baseline point of the sweep.
dc <- data.frame(median(sIC), median(sFDC), median(sCF), median(sCV),
                 median(sDR), median(sITR), median(sIFR), median(sFCF),
                 median(sLFT), median(sDG), median(sGHG), median(sCRF),
                 median(sLCOE))
cbind(dc)
# +/- 5%, 10%, 15%, 20% perturbations around the medians
dc5n  <- (-0.05 * dc) + dc
dc5p  <- (0.05 * dc) + dc
dc10n <- (-0.10 * dc) + dc
dc10p <- (0.10 * dc) + dc
dc15n <- (-0.15 * dc) + dc
dc15p <- (0.15 * dc) + dc
dc20n <- (-0.20 * dc) + dc
dc20p <- (0.20 * dc) + dc
# Impact on LCOE one parameter at a time following the formulas; normalize
# (divide by the baseline LCOE first) to remove the units.
rbind(dc5n, dc10n, dc15n, dc20n, dc5p, dc10p, dc15p, dc20p)
# --- TORNADO DIAGRAM SENSITIVITY ---
# https://stackoverflow.com/questions/55751978/tornado-both-sided-horizontal-bar-plot-in-r-with-chart-axes-crosses-at-a-given/55755671
library(ggplot2)
library(plyr)
library(dplyr)
library(tidyverse)
# Tornado by percentage or by value.
# Or use Excel: positive and negative horizontal bars, adding values by
# reference rather than automatically.
# http://rnotr.com/likert/ggplot/barometer/likert-plots/
# https://www.r-graph-gallery.com/202-barplot-for-likert-type-items.html
library(likert)
data(pisaitems)
# BUG FIX: items28 was commented out in the original but used just below;
# restored so likert() has its input.
items28 <- pisaitems[, substr(names(pisaitems), 1, 5) == "ST24Q"]
p <- likert(items28)
plot(p)
# --- CONTOUR DIAGRAM SENSITIVITY ---
a <- sDR
b <- sCF
c <- sIC  # NOTE(review): masks base c() as a value; c(...) calls still work
# quantile(a, 0.25) ; a25= 7.943471
# quantile(a, 0.75) ; a75= 10.04866
# quantile(b, 0.25) ; b25= 22.92599
# quantile(b, 0.75) ; b75= 38.58379
# quantile(c, 0.25) ; c25= 955.9306
# quantile(c, 0.75) ; c75= 1312.535
# x=c(min(a),min(a),min(a),a25,a25,a25,median(a),median(a),median(a),a75,a75,a75,max(a),max(a),max(a))
# y=c(min(b),min(b),min(b),b25,b25,b25,median(b),median(b),median(b),b75,b75,b75,max(b),max(b),max(b))
# z=c(min(c),min(c),min(c),c25,c25,c25,median(c),median(c),median(c),c75,c75,c75,max(c),max(c),max(c))
x <- c(min(a), min(a), min(a), median(a), median(a), median(a),
       max(a), max(a), max(a))
y <- c(min(b), min(b), min(b), median(b), median(b), median(b),
       max(b), max(b), max(b))
z <- c(min(c), min(c), min(c), median(c), median(c), median(c),
       max(c), max(c), max(c))
df <- data.frame(x = x, y = y, z = z)
# BUG FIX: this worked example was partly uncommented in the original, so
# its z= line clobbered the real z built above; kept fully commented.
# x=c(1,1,1,2,2,2,3,3,3); y=c(0,10,20,0,10,20,0,10,20)
# z=c(900,800,700,600,500,400,300,200,100)
# df <- data.frame(x=x,y=y,z=z)
library(plotly)
p <- plot_ly(data = df, x = ~x, y = ~y, z = ~z, type = "contour",
             colorscale = "Jet")
p
# --- BASELINE CASE ---
# DR; FC; IC  (baseline inputs - the original bare names errored when
# evaluated, since DR and FC are not defined here; kept as a comment)
# --- Heat-map sensitivity data (plotted externally in Origin Lab) ---
a <- sDR
b <- sCF
c <- sIC
d <- data.frame(a, b, c)
# Export the first 1000 simulated rows, rounded to 3 decimals
write.table(round(d[1:1000, ], 3), "d.txt", sep = "\t")
#
# --- TERNARY DIAGRAM SENSITIVITY ---
library(plotly)
# http://www.ggtern.com/2013/12/16/pps-3-state-model/
library(ggtern)
x11()
# Create base plot.
# BUG FIX: in the original a "+" started a new line, which ended the ggplot
# chain early and made the following theme() call a stray expression; the
# "+" must end the previous line.
base <- ggtern(data = df, aes(x, y, z, colour = factor(z))) +
  geom_point(size = 5) +
  theme_rgbw() +
  custom_percent("%") +
  labs(colour = "Initial Cost ($/KW)") +
  theme(legend.position = c(0, 1), legend.justification = c(0, 1))
print(base)
# Draw the same plot restricted to a limiting region
lim <- base +
  limit_tern(.3, .3, 1)
print(lim)
library(ggtern)
# NOTE(review): `plot` here masks base::plot for the rest of the session.
plot <- ggtern(data = df, aes(x = x, y = y, z = z)) +
  geom_point(aes(fill = z), size = 4, shape = 21, color = "black") +
  ggtitle("") +
  labs(fill = "Initial Cost ($/KW)") +
  theme_rgbw() +
  theme(legend.position = c(0, 1), legend.justification = c(0, 1))
plot
library("plot3D")
a <- sFCF
b <- sCRF
c <- sLCOE
# 3-D scatter of the simulated LCOE against two of its drivers.
# NOTE(review): xlab says "Capacity Factor (%)" but `a` is sFCF - confirm
# which variable is intended on the x axis.
# BUG FIX: the clab and ylab strings had been split across lines by a wrap,
# embedding newlines in the labels; rejoined here.
scatter3D(a, b, c, bty = "g", theta = 33, phi = 12, pch = 20, cex = 1.8,
          clab = c("LCOE ($/KWh)"), addlines = TRUE, ticktype = "detailed",
          main = "", xlab = "Capacity Factor (%)",
          ylab = "Capital Recovery Factor", zlab = "LCOE ($/KWH)")
# text3D(BW, Hg, THQ, labels = rownames(t), add = TRUE, colkey = FALSE, cex = 0.5)
# https://stackoverflow.com/questions/10879361/ternary-plot-and-filled-contour
df <- data.frame(a, b, c, d)
# BUG FIX: moved the "+" operators to line ends so the ggtern chain parses
# as a single expression (a leading "+" broke it in the original).
ggtern(df, aes(a, c, b)) +
  geom_interpolate_tern(aes(value = d, fill = ..level..),
                        binwidth = 500, colour = "white") +
  geom_point(aes(fill = d), color = "black", shape = 21, size = 3) +
  scale_fill_gradient(low = "yellow", high = "red") +
  theme(legend.position = c(0, 1), legend.justification = c(0, 1)) +
  labs(fill = "Value, d")
# https://plotly.com/r/ternary-contour/
# http://www.ggtern.com/2015/08/03/ternary-interpolation-smoothing/
# https://stackoverflow.com/questions/45818875/how-to-add-labels-on-axis-in-ternary-diagram-using-ggtern-package
# https://plotly.com/r/ternary-plots/
# Example ternary data: skill mix of 11 team profiles.
journalist <- c(75, 70, 75, 5, 10, 10, 20, 10, 15, 10, 20)
developer <- c(25, 10, 20, 60, 80, 90, 70, 20, 5, 10, 10)
designer <- c(0, 20, 5, 35, 10, 0, 10, 70, 80, 80, 70)
# BUG FIX: the 'point 8' literal had been split across lines by a wrap,
# embedding a newline inside the string.
label <- c('point 1', 'point 2', 'point 3', 'point 4', 'point 5', 'point 6',
           'point 7', 'point 8', 'point 9', 'point 10', 'point 11')
df <- data.frame(journalist, developer, designer, label)
# axis layout
# Build a plotly ternary-axis spec with a common look for title/tick fonts.
# NOTE(review): the name masks graphics::axis for the rest of the script.
# BUG FIX: the original had `axis` and `<- function(title) {` on separate
# lines (a wrap artifact), which is a syntax error in R.
axis <- function(title) {
  list(
    title = title,
    titlefont = list(
      size = 20
    ),
    tickfont = list(
      size = 15
    ),
    tickcolor = 'rgba(0,0,0,0)',
    ticklen = 5
  )
}
# Assemble the ternary scatter step by step with plotly
fig <- df %>% plot_ly()
fig <- fig %>% add_trace(
  type = 'scatterternary',
  mode = 'markers',
  a = ~journalist,
  b = ~developer,
  c = ~designer,
  text = ~label,
  marker = list(
    symbol = 100,
    color = '#DB7365',
    size = 14,
    line = list('width' = 2)
  )
)
fig <- fig %>% layout(
  # BUG FIX: rejoined the title string that a line wrap had split with an
  # embedded newline.
  title = "Simple Ternary Plot with Markers",
  ternary = list(
    sum = 100,
    aaxis = axis('Journalist'),
    baxis = axis('Developer'),
    caxis = axis('Designer')
  )
)
fig
df <- data.frame(a, b, c, d)
# Same interpolated ternary plot as earlier; "+" kept at line ends so the
# chain parses as one expression.
ggtern(df, aes(a, c, b)) +
  geom_interpolate_tern(aes(value = d, fill = ..level..),
                        binwidth = 500, colour = "white") +
  geom_point(aes(fill = d), color = "black", shape = 21, size = 3) +
  scale_fill_gradient(low = "yellow", high = "red") +
  theme(legend.position = c(0, 1), legend.justification = c(0, 1)) +
  labs(fill = "Value, d")
library('Ternary')
# BUG FIX: the line wrap had eaten the opening quote of 'b'.
TernaryPlot('a', 'b', 'c',
            grid.lines = 5,
            grid.lty = 'dotted',
            grid.minor.lines = 1,
            grid.minor.lty = 'dotted',
            point = 'West')
# https://stackoverflow.com/questions/27570221/how-to-make-a-ggplot2-contour-plot-analogue-to-latticefilled-contour
# Define data
x <- seq(1, 11, 1)
y <- seq(1, 11, 1)
# Quadratic response surface used by the contour demos below.
xyz.func <- function(x, y) {
  -10.4 + 6.53 * x + 6.53 * y - 0.167 * x^2 - 0.167 * y^2 + 0.0500 * x * y
}
# Contour plot with an RColorBrewer palette.
# NOTE(review): filled.contour() is from base graphics, not lattice; the
# lattice load is kept only because the original had it.
library(lattice)
library(RColorBrewer)  # for brewer.pal()
z.lattice <- outer(x, y, xyz.func)
filled.contour(x, y, z.lattice, nlevels = 6, col = brewer.pal(6, "YlOrRd"))
# http://rrubyperlundich.blogspot.com/2011/07/r-filledcontour-plot.html
x <- y <- 1:10       # two vectors with the integers from 1 to 10
z <- outer(x, y)     # matrix as the outer product of the two vectors
filled.contour(z)
filled.contour(x = x, y = y, z, col = heat.colors(20))
# Sequences over [-pi, pi] and [-0.5*pi, 1.5*pi], 50 entries each
my.seq <- seq(-pi, pi, length = 50)
my.seq2 <- seq(-0.5 * pi, 1.5 * pi, length = 50)
# Matrix of products sin(x)*cos(y) built with outer()
my.matrix <- outer(sin(my.seq), cos(my.seq2))
# BUG FIX: rejoined the main-title string that a line wrap had split.
filled.contour(x = my.seq, y = my.seq2, my.matrix,
               plot.title = title(
                 main = "Products Sin(x)*Cos(y) on [-Pi,Pi]x[-0.5Pi,1.5Pi]",
                 xlab = "x", ylab = "y"),
               key.title = title(main = "products"),
               col = terrain.colors(20))
#https://stackoverflow.com/questions/15869969/adding-a-color-key-to-contour-plots-in-r
# image.plot() (from the fields package) draws the built-in volcano
# elevation matrix with a color key; contour() then overlays isolines on
# the same device.
library(fields)
image.plot(volcano)
contour(volcano, add =
TRUE)
# http://www.r-qualitytools.org/IMPROVE.html
# install.packages("qualityTools")
# BUG FIX: load the package before using its plot helpers (the original
# called effectPlot() before require(qualityTools)); library() errors
# loudly instead of require()'s silent FALSE.
library(qualityTools)
effectPlot(fdo, classic = TRUE)
interactionPlot(fdo)
par(mfrow = c(1, 2))
wirePlot(A, B, yield, data = fdo)
contourPlot(A, B, yield, data = fdo)
# --- NEW GRAPH ON CORRELATION RANK USING R ---
# https://datascienceplus.com/find-insights-with-ranked-cross-correlations/
# Ranked cross-correlations
# devtools::install_github("laresbernardo/lares")
library(lares)
library(dplyr)
# data("starwars")
# df <- select(starwars, -starships, -vehicles, -films)
# NOTE(review): corr_cross() expects a data frame; here df is a plain
# numeric vector of the coefficients computed above - confirm intent.
df <- c(cIC, cFDC, cCF, cCV, cDR, cITR, cIFR, cFCF, cLFT, cDG, cGHG)
corr_cross(df)
data(dft)
# Let's get rid of some noisy columns first
dft <- select(dft, -Cabin, -Ticket)
corr_cross(dft, top = 15)
# Prediction with an artificial neural network of class nn
library(neuralnet)
# Split data (NOTE(review): no set.seed(), so the split is not reproducible)
train_idx <- sample(nrow(iris), 2/3 * nrow(iris))
iris_train <- iris[train_idx, ]
iris_test <- iris[-train_idx, ]
# Binary classification
nn <- neuralnet(Species == "setosa" ~ Petal.Length + Petal.Width,
                iris_train, linear.output = FALSE)
pred <- predict(nn, iris_test)
table(iris_test$Species == "setosa", pred[, 1] > 0.5)
# Multiclass classification
nn <- neuralnet((Species == "setosa") + (Species == "versicolor") +
                  (Species == "virginica") ~ Petal.Length + Petal.Width,
                iris_train, linear.output = FALSE)
pred <- predict(nn, iris_test)
table(iris_test$Species, apply(pred, 1, which.max))
#
# https://search.r-project.org/CRAN/refmans/neuralnet/html/predict.nn.html
# --- NEURAL NET / DEEP LEARNING IN R ---
# install package
require("neuralnet")
# Create the training data set: exam scores and placement outcome
TKS <- c(20, 10, 30, 20, 80, 30)
CSS <- c(90, 20, 40, 50, 50, 80)
Placed <- c(1, 0, 0, 0, 1, 1)
# Combine multiple columns or features into a single data frame
df <- data.frame(TKS, CSS, Placed)
# load library
require(neuralnet)
# Fit the neural network
nn <- neuralnet(Placed ~ TKS + CSS, data = df, hidden = 3,
                act.fct = "logistic", linear.output = FALSE)
# plot neural network
nnplot <- plot(nn)
# Create the test set
TKS <- c(30, 40, 85)
CSS <- c(85, 50, 40)
test <- data.frame(TKS, CSS)
## Prediction using the neural network
Predict <- compute(nn, test)
Predict$net.result
# Convert probabilities into binary classes with a 0.5 threshold
prob <- Predict$net.result
pred <- ifelse(prob > 0.5, 1, 0)
pred
# https://www.datacamp.com/tutorial/neural-network-models-r
# https://datascienceplus.com/neuralnet-train-and-test-neural-networks-using-r/
#
# --- RELATIVE IMPACT / RELATIVE CHANGE ---
# Regress simulated LCOE on log10 of the main cost drivers; the constants
# below are presumably the coefficients copied from summary(rim) - verify.
rim <- lm(LCOEs ~ log(sCC, base = 10) + log(sCFOM, base = 10) +
            log(sCVOM, base = 10) + log(sDR, base = 10))
summary(rim)
Initial_Cost <- 0.1247363
Fix_OM <- 0.0348737
Variable_OM <- 0.0552038
Discount_rate <- -0.0002596
plot(0, 0, xlim = c(0, 0.12), ylim = c(0, 1), xaxt = "n", yaxt = "n",
     xlab = NA, ylab = NA)
grid(lty = 1, col = "grey")
par(new = TRUE)
barplot(c(Initial_Cost, Fix_OM, Variable_OM, Discount_rate), col = 2:5,
        horiz = TRUE, beside = TRUE, plot = TRUE,
        legend.text = c("Initial_Cost", "Fix_OM", "Variable_OM",
                        "Discount_rate"))
# --- CUMULATIVE CASH FLOW ---
r <- read.table(file("clipboard"), header = TRUE, sep = "\t", dec = ",")
str(r); attach(r); names(r)
# Real discount rate. NOTE(review): `r` is the whole data frame and `i` an
# attached column; verify against the clipboard layout.
DRc <- ((1 + r) / (1 + i)) - 1
DRc
# C=(1477*100000*CRF)+ (58*100000)+(0.025*100000*8760); C
PVC <- 718094394   # present value of costs
PVB <- 0.11 * (E)  # present value of benefits (tariff x energy E)
CF <- PVB - PVC    # net cash flow
NPV <- CF / ((1 + DRc)^t)
# https://www.journaldev.com/39620/how-to-create-an-area-plot-in-r-using-ggplot2#:~:text=The%20area%20graphs%20are%20the,points%20using%20the%20ggplot2%20package.
library(ggplot2)
library(hrbrthemes)
# Reading data into data frames
data <- data.frame(Y, CCF)
xdata <- Y
ydata <- CCF
# Area chart with theme, title and labels
ggplot(data, aes(x = xdata, y = ydata)) +
  geom_area(fill = '#142F86', alpha = 1) +
  geom_line(color = 'skyblue', size = 1) +
  geom_point(size = 1, color = 'blue') +
  ggtitle("Area plot using ggplot2 in R") +  # rejoined wrapped title string
  labs(x = 'Value', y = 'frequency') +
  theme_ipsum()
# --- CONTOUR AND TRIANGULAR PLOT ---
# https://plotly.com/r/contour-plots/
# df=df$z[df$z>800]; df=df$z[df$z<2400]
datac <- data.frame(sDR, sCFOM, sCC)
View(datac)
# Placeholder mappings: x = discount rate, y = fixed O&M, z = capital cost.
# NOTE(review): x, y, z must already exist; these self-assignments only
# carry the role comments, as in the original.
x <- x  # DR
y <- y  # FOM
z <- z  # CC
df <- data.frame(x = x, y = y, z = z)
# p <- plot_ly(data = df, x=~x, y=~y, z=~z, type = "scatter3d", colorscale='Jet'); p
# p <- plot_ly(x = x, y = y, z = z, type = "contour"); p
library(plotly)
p <- plot_ly(data = df, x = ~x, y = ~y, z = ~z,
             type = "histogram2dcontour", colorscale = "Jet")
p
p <- plot_ly(data = df, x = ~x, y = ~y, z = ~z,
             type = "contour", colorscale = "Jet")
p
fig <- plot_ly(x = x, y = y, z = z, type = "contour")
fig
# --- EMISSIONS AND ENERGY SAVED ---
# WARNING(review): clears the whole workspace before re-importing data.
rm(list = ls())
# rm(r)  # redundant after rm(list=ls()); kept as a comment
# data = write.table(datac, "data.txt", sep="\t")
r <- read.table(file("clipboard"), header = TRUE, sep = "\t", dec = ",")
str(r); attach(r); names(r)
# --- OTHER DISTRIBUTION FORM OF WEIBULL ---
data <- read.table(file("clipboard"), header = TRUE, sep = "\t", dec = ".")
str(data)
data <- data.frame(data)
library(ggfan)
plot(ws, type = "l")
# add fan
fan(data, start = 1,
    anchor = ws[time(ws) == 2],
    type = "interval",
    probs = seq(5, 95, 5), ln = c(50, 80))
library(ggplot2)
# BUG FIX: "+" moved to line ends throughout — in the original a leading
# "+geom_fan()" / "+ theme_bw()" on the next line broke each ggplot chain.
ggplot(fake_df, aes(x = x, y = y)) +
  geom_fan()
ggplot(data, aes(x = month, y = ws)) +
  geom_fan()
# Use precomputed quantiles - reduces storage requirements.
intervals <- 1:19 / 20
fake_q <- calc_quantiles(fake_df, intervals = intervals)
fake_q <- calc_quantiles(data, intervals = intervals)
# intervals in geom_fan must be the same as those used to compute quantiles.
p <- ggplot(fake_q, aes(x = x, y = y, quantile = quantile)) +
  geom_fan(intervals = intervals)
p + theme_bw()
# Change the colour scale
p1 <- ggplot(fake_df, aes(x = x, y = y)) +
  geom_fan() + scale_fill_gradient(low = "red", high = "pink")
p1 + theme_bw()
library("zoo")
library("tsbugs")
library("fanplot")
plot(NULL, main = "Percentiles", xlim = c(1, 965), ylim = c(-2.5, 1.5))
fan(data = th.mcmc)
# Fan charts of the MCMC draws and of the wind-speed series
fan0(data = th.mcmc, type = "interval", ln = c(0.5, 0.8, 0.95),
     llab = TRUE, rcex = 0.6)
fan0(data = data$ws, type = "interval", ln = c(0.5, 0.95),
     llab = TRUE, rcex = 0.6)
x <- data$month
y <- data$ws
# Main Plot
# ylim <- c(min(y)-interval, max(y)+interval) # account for CI when determining ylim
# plot(x, y, type="b", pch=20, lwd=2, ylim=ylim, col="blue", xlab="Month",
#      ylab="Wind Speed (m/s)") # plot x and y
par(new = TRUE)
# BUG FIX: rejoined the ylab string that a wrap had split with a newline.
plot(x, y, type = "b", pch = 19, lwd = 2, col = "blue",
     xlab = "Month", ylab = "Wind Speed (m/s)")  # plot x and y
grid(lty = 1, col = "grey")
# Values for fake data
# x <- 1:10 # x values
# y <- (x-1)*0.5 + rnorm(length(x), mean=0, sd=s.e.) # generate y values
# --- Area (confidence-band) plot ---
# Values for noise and CI size
s.e. <- 0.25                     # standard error of noise
interval <- s.e. * qnorm(0.975)  # standard error * 97.5% quantile
# Determine the x values that go into the CI polygon
CI.x.top <- x       # x values going forward
CI.x.bot <- rev(x)  # x values backwards
CI.x <- c(CI.x.top, CI.x.bot)   # polygons are drawn clockwise
# Determine the y values for the CI polygon
CI.y.top <- y + interval        # top of CI
CI.y.bot <- rev(y) - interval   # bottom of CI, but on reversed y!
CI.y <- c(CI.y.top, CI.y.bot)   # forward, then backward
# Add a polygon for the CI
CI.col <- adjustcolor("blue", alpha.f = 0.25)  # pick a pretty CI color
polygon(CI.x, CI.y, col = CI.col, border = NA) # draw the polygon
CI.col <- adjustcolor("red", alpha.f = 0.01)
polygon(CI.x, CI.y, col = CI.col, border = NA)
# Add legend to explain what the arrows are
legend("topleft", legend = "", xjust = 0.5, bty = "n")
# legend("topleft", legend="Arrows indicate path\nfor drawing polygon",
#        xjust=0.5, bty="n")
# Point out the path of the polygon
arrows(CI.x.top[1], CI.y.top[1] + 0.1, CI.x.top[3], CI.y.top[3] + 0.1)
arrows(CI.x.top[5], CI.y.top[5] + 0.1, CI.x.top[7], CI.y.top[7] + 0.1)
arrows(CI.x.bot[1], CI.y.bot[1] - 0.1, CI.x.bot[3], CI.y.bot[3] - 0.1)
arrows(CI.x.bot[6], CI.y.bot[6] - 0.1, CI.x.bot[8], CI.y.bot[8] - 0.1)
ggplot(data = data, aes(x = month, y = ws)) +
  geom_line(aes(size = ws), color = "#FC4E07")
# BUG FIX: dev.off() originally sat before the arrows() calls, closing the
# device they needed to draw on; the device is now closed only at the end.
dev.off()
# Rayleigh-distribution demo: sample, density overlay, and empirical CDF.
# NOTE(review): rrayleigh/drayleigh/prayleigh are not base R and no library()
# call is visible here; confirm which package (e.g. extraDistr) supplies them.
x <- rrayleigh(1e5, 13)
hist(x, 100, freq = FALSE)
curve(drayleigh(x, 13), 0, 60, col
= "red", add = TRUE)
hist(prayleigh(x, 13))
plot(ecdf(x))
curve(prayleigh(x, 13), 0, 60, col
= "red", lwd = 2, add = TRUE)
# --- Quick reference: random-number generators for common distributions ---
# Uniform distribution
runif(n = 10, min = 0, max = 1)
# Normal distribution
rnorm(n = 10, mean = 0, sd = 1)
# Binomial distribution
rbinom(n = 10, size = 5, prob = 0.2)
# Log-normal distribution
rlnorm(n = 10, meanlog = 0, sdlog = 1)
# Weibull distribution. NOTE(review): emp, kmm, cmm must come from the
# wind-speed fitting section earlier in the file (emp is the sample size n).
hist(rweibull(emp, shape = kmm, scale = cmm))
# Exponential distribution
rexp(n = 10, rate = 1)
# Poisson distribution
rpois(n = 10, lambda = 1)
# Gamma distribution
rgamma(n = 10, shape = 1, rate = 1)
# Chi-square distribution, where df is degrees of freedom and ncp is the
# non-centrality parameter
rchisq(n = 10, df = 3, ncp = 1)
# Kernel-density MSE for a Weibull sample via the DEEVD package
require("DEEVD")
# NOTE(review): V_60 must exist in the workspace (it is rweibull's n argument)
y <- rweibull(V_60, 1)
# Rule-of-thumb bandwidth: 0.79 * IQR * n^(-1/5)
h <- 0.79 * IQR(y) * length(y)^(-1/5)
mseweibull(y, 200, h, "Weibull")
# Empirical CDF of the fitted Weibull sample
Fn <- ecdf(weibullmm)
plot(Fn, verticals = TRUE, col.points = "blue", col.hor = "red",
     col.vert = "bisque")
plot(Fn, verticals = TRUE, do.points = FALSE)
require(Hmisc)
Ecdf(weibullmm, xlab = "")
scat1d(weibullmm)                             # add rug plot
histSpike(weibullmm, add = TRUE, frac = .15)  # add spike histogram
Ecdf(weibullmm, datadensity = 'density')      # add data-density strip
ed <- Ecdf(weibullmm)
d <- data.frame(ed$x)
# Empirical density / CDF / quantile functions from a sample
library(edfun)
set.seed(123)
x <- rnorm(1000)
x_dist <- edfun(weibullmm)
f <- x_dist$dfun
curve(f, -2, 2)
set.seed(123)
x <- rnorm(1000)
x_dist <- edfun(x)
f <- x_dist$dfun   # empirical density
curve(f, -2, 2)
f <- x_dist$pfun   # empirical CDF
curve(f, -2, 2)
require(EnvStats)
# BUG FIX: the original nested `obs = weibullmm` inside seq(); obs is an
# argument of qemp(), not of seq().
x <- qemp(p = seq(0, 1, len = 92065), obs = weibullmm)
y <- demp(x, weibullmm)
require(mltools)
dt <- data.frame(x = c(0.3, 1.3, 1.4, 3.6), y = c(1.2, 1.2, 3.8, 3.9))
dt
empirical_cdf(dt$x, ubounds = seq(1, 4, by = 1.0))
# empirical_cdf(dt, ubounds=list(x=seq(1, 4, by=1.0)))
# empirical_cdf(dt, ubounds=list(x=seq(1, 4, by=1.0), y=seq(1, 4, by=1.0)))