Compare commits

No commits in common. "177e3bc4c8914db021e6d0a3de6db8d5711ecd78" and "38fb9ca7d06f1215aa57e105a5be86a8b596a09d" have entirely different histories.

3 changed files with 1 addition and 83 deletions

Changed file 1 of 3

@@ -7,6 +7,7 @@ from collections import UserList
from dataclasses import dataclass
from numbers import Number
from typing import Any, Callable, Iterable, List, Literal, Mapping, Sequence, Type
from unittest import skip
from dateutil.relativedelta import relativedelta

Changed file 2 of 3

@@ -1,11 +1,9 @@
import datetime
import statistics
from typing import Literal
from fincal.core import date_parser
from .fincal import TimeSeries
from .utils import _interval_to_years
@date_parser(3, 4)
@@ -23,13 +21,6 @@ def sharpe_ratio(
    closest: Literal["previous", "next"] = "previous",
    date_format: str = None,
):
    interval_days = int(_interval_to_years(return_period_unit, return_period_value) * 365 + 1)
    if from_date is None:
        from_date = time_series_data.start_date + datetime.timedelta(days=interval_days)
    if to_date is None:
        to_date = time_series_data.end_date
    if risk_free_data is None and risk_free_rate is None:
        raise ValueError("At least one of risk_free_data or risk_free_rate is required")
    elif risk_free_data is not None:
@@ -56,47 +47,3 @@ def sharpe_ratio(
    sharpe_ratio_value = excess_returns / sd
    return sharpe_ratio_value
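
For reference, the excess_returns / sd expression above is the usual Sharpe ratio: mean return in excess of the risk-free rate, divided by the standard deviation of returns. A minimal self-contained sketch of the same arithmetic (the numbers and names below are illustrative only, not taken from fincal):

import statistics

asset_returns = [0.12, 0.08, 0.15, 0.05, 0.10]  # hypothetical rolling returns
risk_free_rate = 0.04                            # hypothetical flat risk-free rate

excess_returns = statistics.mean(asset_returns) - risk_free_rate  # 0.10 - 0.04 = 0.06
sd = statistics.stdev(asset_returns)                              # ≈ 0.038
sharpe_ratio_value = excess_returns / sd                          # ≈ 1.58
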
@date_parser(2, 3)
def beta(
    asset_data: TimeSeries,
    market_data: TimeSeries,
    from_date: str | datetime.datetime = None,
    to_date: str | datetime.datetime = None,
    frequency: Literal["D", "W", "M", "Q", "H", "Y"] = None,
    return_period_unit: Literal["years", "months", "days"] = "years",
    return_period_value: int = 1,
    as_on_match: str = "closest",
    prior_match: str = "closest",
    closest: Literal["previous", "next"] = "previous",
    date_format: str = None,
):
    interval_days = int(_interval_to_years(return_period_unit, return_period_value) * 365 + 1)
    if from_date is None:
        from_date = asset_data.start_date + datetime.timedelta(days=interval_days)
    if to_date is None:
        to_date = asset_data.end_date
    common_params = {
        "from_date": from_date,
        "to_date": to_date,
        "frequency": frequency,
        "return_period_unit": return_period_unit,
        "return_period_value": return_period_value,
        "as_on_match": as_on_match,
        "prior_match": prior_match,
        "closest": closest,
        "date_format": date_format,
    }
    asset_rr = asset_data.calculate_rolling_returns(**common_params)
    market_rr = market_data.calculate_rolling_returns(**common_params)
    cov = statistics.covariance(asset_rr.values, market_rr.values)
    market_var = statistics.variance(market_rr.values)
    beta = cov / market_var
    return beta
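
The beta above is the slope of the asset's returns against the market's returns: the covariance of the two rolling-return series divided by the variance of the market series. A rough standalone sketch with made-up numbers (the lists are illustrative, not fincal data; statistics.covariance needs Python 3.10+):

import statistics

asset_rr = [0.10, 0.04, -0.02, 0.08, 0.05]   # hypothetical asset rolling returns
market_rr = [0.08, 0.03, -0.01, 0.06, 0.04]  # hypothetical market rolling returns

cov = statistics.covariance(asset_rr, market_rr)
market_var = statistics.variance(market_rr)
beta = cov / market_var  # beta > 1 means the asset tends to amplify market moves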

Changed file 3 of 3

@@ -355,24 +355,6 @@ class TestReadCsv:
class TestTransform:
    def test_daily_to_weekly(self, create_test_data):
        ts_data = create_test_data(AllFrequencies.D, num=782, skip_weekends=True)
        ts = TimeSeries(ts_data, "D")
        tst = ts.transform("W", "mean")
        assert isinstance(tst, TimeSeries)
        assert len(tst) == 157
        assert "2017-01-30" in tst
        assert tst.iloc[4] == (datetime.datetime(2017, 1, 30), 1021.19)

    def test_daily_to_monthly(self, create_test_data):
        ts_data = create_test_data(AllFrequencies.D, num=782, skip_weekends=False)
        ts = TimeSeries(ts_data, "D")
        tst = ts.transform("M", "mean")
        assert isinstance(tst, TimeSeries)
        assert len(tst) == 26
        assert "2018-01-01" in tst
        assert round(tst.iloc[12][1], 2) == 1146.1

    def test_daily_to_yearly(self, create_test_data):
        ts_data = create_test_data(AllFrequencies.D, num=782, skip_weekends=True)
        ts = TimeSeries(ts_data, "D")
@@ -404,18 +386,6 @@ class TestTransform:
        tst = ts.transform("Y", "mean")
        assert "2019-01-01" in tst
        assert round(tst.iloc[2][1], 2) == 1054.50
        with pytest.raises(ValueError):
            ts.transform("D", "mean")

    def test_monthly_to_qty(self, create_test_data):
        ts_data = create_test_data(AllFrequencies.M, num=36)
        ts = TimeSeries(ts_data, "M")
        tst = ts.transform("Q", "mean")
        assert len(tst) == 12
        assert "2018-10-01" in tst
        assert tst.iloc[7] == (datetime.datetime(2018, 10, 1), 1021.19)
        with pytest.raises(ValueError):
            ts.transform("M", "sum")
class TestReturnsAgain:
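
The removed transform tests above all exercise downsampling: observations at a finer frequency are grouped into the coarser target period and aggregated with the named function ("mean" or "sum"), while asking for a finer target frequency raises ValueError. A rough sketch of the grouping idea, independent of the TimeSeries implementation (the helper below is illustrative and not part of fincal):

import datetime
import statistics
from collections import defaultdict

def monthly_mean(data: list[tuple[datetime.datetime, float]]) -> list[tuple[datetime.datetime, float]]:
    # bucket daily observations by the first day of their month, then average each bucket
    buckets: dict[datetime.datetime, list[float]] = defaultdict(list)
    for dt, value in data:
        buckets[dt.replace(day=1)].append(value)
    return [(month, statistics.mean(values)) for month, values in sorted(buckets.items())]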