Java: Autoregressive Analysis, Linear Regression, and Fund Performance Metrics
Requires the Jama matrix library (Jama.Matrix).
The Java source below implements autoregressive analysis, linear regression, and fund performance metrics (mean, standard deviation, downside risk, and the Sharpe, Sortino, Treynor, and Jensen measures).
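For reference, these are the standard definitions that the code below implements. DR is the downside deviation, the sample standard deviation of returns below the risk-free rate; alpha, beta, and R² are fit by least squares on excess returns x_i = r_{m,i} - r_f, y_i = r_{p,i} - r_f:

\[
\text{Sharpe} = \frac{E[R_p] - R_f}{\sigma_p},\qquad
\text{Sortino} = \frac{E[R_p] - R_f}{DR},\qquad
\text{Treynor} = \frac{E[R_p] - R_f}{\beta},\qquad
\alpha_{\text{Jensen}} = E[R_p] - R_f - \beta\,(E[R_m] - R_f)
\]
\[
\beta = \frac{S_{xy}}{S_{xx}},\qquad
\alpha = \bar{y} - \beta\,\bar{x},\qquad
R^2 = \frac{S_{xy}^2}{S_{xx}\,S_{yy}},\qquad
S_{xy} = \sum_i x_i y_i - n\,\bar{x}\,\bar{y}\ \ (\text{and likewise } S_{xx},\ S_{yy}).
\]

Full Java listing: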
import Jama.Matrix;

public class test {
    public static void main(String[] args) {
        double rf = 1.0;                              // risk-free rate
        double[] rm = {1, 2, 3, 4, 3, 2, 5, 6, 8, 8}; // market returns
        double[] rp = {2, 2, 3, 4, 4, 3, 5, 6, 9, 8}; // portfolio returns
        // Linear regression (alpha, beta, R^2 on excess returns)
        Linear l = linearRegression(rp, rm, rf);
        System.out.println("Linear regression");
        System.out.println("alpha: " + l.alpha + "\nbeta: " + l.beta + "\nr2: " + l.rsquare);
        // Autoregression of order 2
        Autoregressive a = autoRegression(rp, 2);
        System.out.println("Autoregression");
        // coefficients
        for (double ci : a.ratios) {
            System.out.println("ratio: " + ci);
        }
        // fitted values
        for (double ci : a.estimates) {
            System.out.println("estimates: " + ci);
        }
        // residuals (noise)
        for (double ci : a.noises) {
            System.out.println("noises: " + ci);
        }
        // mean of the residuals
        System.out.println("exp noises: " + exp(a.noises));
        // standard deviation of the residuals
        System.out.println("dev noises: " + dev(a.noises));
    }
    // Arithmetic mean of a return series (-9999 is used throughout as an error sentinel).
    static double exp(double[] rp) {
        int len = rp.length;
        if (len > 0) {
            double output = 0.0;
            for (double p : rp) {
                output += p;
            }
            output /= len;
            return output;
        } else {
            return -9999;
        }
    }
    // Sample standard deviation (divides by n - 1, so at least two observations are required).
    static double dev(double[] rp) {
        int len = rp.length;
        if (len > 1) {
            double output = 0.0;
            double exp = exp(rp);
            for (double p : rp) {
                output += Math.pow(p - exp, 2);
            }
            output = Math.sqrt(output / (len - 1));
            return output;
        } else {
            return -9999;
        }
    }
    // Downside deviation: sample standard deviation of returns below the risk-free rate.
    static double downRisk(double[] rp, double rf) {
        int len = rp.length;
        if (len > 0) {
            double output = 0.0;
            int count = 0;
            for (double p : rp) {
                if (p < rf) {
                    count++;
                    output += Math.pow(p - rf, 2);
                }
            }
            if (count > 1) {
                output = Math.sqrt(output / (count - 1));
                return output;
            } else {
                System.out.println("Fewer than two returns fall below the risk-free rate; downside deviation is undefined");
                return -9999;
            }
        } else {
            return -9999;
        }
    }
    // Sortino ratio: excess return over downside deviation.
    static double sortinoRatio(double exp, double rf, double dr) {
        if (dr != 0) {
            return (exp - rf) / dr;
        } else {
            System.out.println("Invalid downside deviation");
            return -9999;
        }
    }

    // Sharpe ratio: excess return over return standard deviation.
    static double sharpRatio(double exp, double rf, double dp) {
        if (dp != 0) {
            return (exp - rf) / dp;
        } else {
            System.out.println("Standard deviation is 0");
            return -9999;
        }
    }
    // Ordinary least squares on excess returns: alpha, beta, and R^2.
    static Linear linearRegression(double[] rp, double[] rm, double rf) {
        Linear output = new Linear(-9999, -9999, -9999);
        int len = rp.length;
        int lenrm = rm.length;
        if (len > 0) {
            if (len == lenrm) {
                double xexp = 0.0;
                double yexp = 0.0;
                double xsquare = 0.0;
                double ysquare = 0.0;
                double xy = 0.0;
                for (int i = 0; i < len; i++) {
                    double yi = rp[i] - rf;   // excess portfolio return
                    double xi = rm[i] - rf;   // excess market return
                    xexp += xi;
                    yexp += yi;
                    xy += xi * yi;
                    xsquare += Math.pow(xi, 2);
                    ysquare += Math.pow(yi, 2);
                }
                xexp /= len;
                yexp /= len;
                double lxy = xy - len * xexp * yexp;            // S_xy
                double lxx = xsquare - len * Math.pow(xexp, 2); // S_xx
                double lyy = ysquare - len * Math.pow(yexp, 2); // S_yy
                output.beta = lxy / lxx;
                output.alpha = yexp - output.beta * xexp;
                output.rsquare = Math.pow(lxy, 2) / (lxx * lyy);
                return output;
            } else {
                System.out.println("Market return series length does not match");
            }
        } else {
            System.out.println("Return input is empty");
        }
        return output;
    }
    // Jensen's alpha.
    static double jensen(double eRp, double eRm, double rf, double beta) {
        return eRp - rf - beta * (eRm - rf);
    }

    // Treynor ratio: excess return over systematic risk (beta).
    static double treynorRatio(double exp, double rf, double beta) {
        if (beta != 0) {
            return (exp - rf) / beta;
        } else {
            System.out.println("Systematic risk beta is 0");
            return -9999;
        }
    }
    // AR(p) fit by least squares: coefficients, fitted values, and residuals
    // (see the matrix form spelled out after the listing).
    static Autoregressive autoRegression(double[] rp, int p) {
        double[] op = {-9999};
        Autoregressive output = new Autoregressive(op, op, op);
        int len = rp.length;
        // Require at least p usable rows so that A'A is not automatically singular.
        if (len < 3 || p < 1 || len < 2 * p) {
            System.out.println("Invalid input");
        } else {
            int leny = len - p;
            // Dependent vector: rp[p], rp[p+1], ..., rp[len-1]
            double[][] y = new double[leny][1];
            for (int i = p; i < len; i++) {
                y[i - p][0] = rp[i];
            }
            // Design matrix: row i holds the p lagged values of rp[p + i] (no intercept column).
            double[][] a = new double[leny][p];
            for (int i = 0; i < leny; i++) {
                for (int j = 0; j < p; j++) {
                    a[i][j] = rp[p - 1 - j + i];
                }
            }
            Matrix mY = new Matrix(y);
            Matrix mA = new Matrix(a);
            // Normal equations: ratios = (A'A)^-1 A'Y
            Matrix mR = (mA.transpose().times(mA)).inverse().times(mA.transpose()).times(mY);
            output.ratios = mR.getColumnPackedCopy();
            Matrix mYhat = mA.times(mR);   // fitted values
            output.estimates = mYhat.getColumnPackedCopy();
            Matrix mNs = mY.minus(mYhat);  // residuals
            output.noises = mNs.getColumnPackedCopy();
        }
        return output;
    }
}
// Holder for linear-regression results.
class Linear {
    double alpha;
    double beta;
    double rsquare;

    public Linear(double alpha, double beta, double rsquare) {
        this.alpha = alpha;
        this.beta = beta;
        this.rsquare = rsquare;
    }
}
// Holder for autoregression results.
class Autoregressive {
    double[] ratios;     // AR coefficients
    double[] estimates;  // fitted values
    double[] noises;     // residuals

    public Autoregressive(double[] ratios, double[] estimates, double[] noises) {
        this.ratios = ratios;
        this.estimates = estimates;
        this.noises = noises;
    }
}
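The autoRegression method fits the AR(p) model by ordinary least squares without an intercept: it stacks the p most recent lags of each observation into a design matrix A and solves the normal equations. A sketch of the system, matching the loops above (r_t denotes rp[t], n the series length):

\[
\underbrace{\begin{pmatrix} r_p \\ r_{p+1} \\ \vdots \\ r_{n-1} \end{pmatrix}}_{Y}
\approx
\underbrace{\begin{pmatrix}
r_{p-1} & r_{p-2} & \cdots & r_0 \\
r_{p}   & r_{p-1} & \cdots & r_1 \\
\vdots  &         &        & \vdots \\
r_{n-2} & r_{n-3} & \cdots & r_{n-1-p}
\end{pmatrix}}_{A}
\underbrace{\begin{pmatrix} \varphi_1 \\ \vdots \\ \varphi_p \end{pmatrix}}_{\text{ratios}},
\qquad
\hat{\varphi} = (A^{\mathsf T} A)^{-1} A^{\mathsf T} Y,
\quad
\text{estimates} = A\hat{\varphi},
\quad
\text{noises} = Y - A\hat{\varphi}.
\]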
Sample output:
Linear regression
alpha: 0.5611510791366894
beta: 0.9496402877697845
r2: 0.9568894502718442
Autoregression
Coefficients
ratio: 0.5716829919857536
ratio: 0.7043633125556548
Fitted values
estimates: 2.5520926090828167
estimates: 3.12377560106857
estimates: 4.399821905609978
estimates: 5.104185218165633
estimates: 4.532502226179879
estimates: 4.971504897595732
estimates: 6.951914514692795
estimates: 9.37132680320571
Residuals (noise)
noises: 0.44790739091718335
noises: 0.8762243989314298
noises: -0.3998219056099783
noises: -2.1041852181656333
noises: 0.4674977738201207
noises: 1.0284951024042677
noises: 2.0480854853072046
noises: -1.3713268032057098
Residual mean
exp noises: 0.12410952804986058
Residual standard deviation
dev noises: 1.3514101374682685
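The fund-metric helpers (downRisk, sharpRatio, sortinoRatio, treynorRatio, jensen) are defined above but never exercised in main. Below is a minimal sketch of how they could be wired together. It assumes it is compiled alongside the listing above in the same (default) package, and the risk-free rate of 3.0 is a hypothetical choice made only so the sample series has more than one observation below it (with rf = 1.0 the downside deviation would be undefined for this data), so its results differ from the sample output above.

// Hypothetical demo class; compile next to the listing above so the
// package-private static helpers in class test are visible.
public class MetricsDemo {
    public static void main(String[] args) {
        double rf = 3.0;                              // assumed risk-free rate for this demo
        double[] rm = {1, 2, 3, 4, 3, 2, 5, 6, 8, 8}; // market returns (same sample data)
        double[] rp = {2, 2, 3, 4, 4, 3, 5, 6, 9, 8}; // portfolio returns

        double eRp   = test.exp(rp);          // mean portfolio return
        double eRm   = test.exp(rm);          // mean market return
        double sigma = test.dev(rp);          // standard deviation of portfolio returns
        double dr    = test.downRisk(rp, rf); // downside deviation

        Linear l = test.linearRegression(rp, rm, rf); // alpha, beta, R^2 on excess returns

        System.out.println("Sharpe:  " + test.sharpRatio(eRp, rf, sigma));
        System.out.println("Sortino: " + test.sortinoRatio(eRp, rf, dr));
        System.out.println("Treynor: " + test.treynorRatio(eRp, rf, l.beta));
        System.out.println("Jensen:  " + test.jensen(eRp, eRm, rf, l.beta));
    }
}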