Commit

replace
plusbang committed Feb 17, 2023
1 parent 79ad3df commit 7f56535
Showing 7 changed files with 105 additions and 12 deletions.
46 changes: 46 additions & 0 deletions .github/workflows/chronos-prvn-python-spark-2.4-py37.yml
@@ -209,3 +209,49 @@ jobs:
        env:
          BIGDL_ROOT: ${{ github.workspace }}
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}

  chronos-PRVN-py38-test:
    runs-on: [ self-hosted, Gondolin, ubuntu-20.04-lts ]
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8"]
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 8
        uses: ./.github/actions/jdk-setup-action
      - name: Set up Maven
        uses: ./.github/actions/maven-setup-action

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Run Chronos python test
        shell: bash
        run: |
          conda remove -n chronos-prvn-env -y --all
          conda create -n chronos-prvn-env -y python==3.8 setuptools==58.0.4 -c ${GONDOLIN_CONDA_CHANNEL} --override-channels
          source activate chronos-prvn-env
          apt-get update
          apt-get install -y libgl1
          apt-get install patchelf
          pip uninstall -y bigdl-friesian bigdl-friesian-spark3 bigdl-dllib bigdl-dllib-spark3 bigdl-orca pyspark bigdl-orca-spark3 bigdl-chronos bigdl-chronos-spark3 bigdl-nano bigdl-friesian bigdl-friesian-spark3
          wget https://raw.githubusercontent.com/analytics-zoo/gha-cicd-env/main/python-requirements/requirements-chronos-python-ut.txt -O ${{ github.workspace }}/requirements-chronos-python-ut.txt
          pip install -i ${GONDOLIN_PIP_MIRROR} --trusted-host ${GONDOLIN_TRUSTED_HOST} -r ${{ github.workspace }}/requirements-chronos-python-ut.txt
          bash python/dev/release_default_linux_spark246.sh default false false
          bash python/nano/dev/build_and_install.sh linux default false pytorch --force-reinstall --no-deps -U
          whl_name=`ls python/nano/dist/`
          pip install -i https://pypi.python.org/simple python/nano/dist/${whl_name}[tensorflow_27,pytorch,inference]
          pip install -i https://pypi.python.org/simple python/dllib/src/dist/bigdl_dllib-*-py3-none-manylinux1_x86_64.whl
          pip install -i https://pypi.python.org/simple python/orca/src/dist/bigdl_orca-*-py3-none-manylinux1_x86_64.whl
          pip install -i https://pypi.python.org/simple python/chronos/src/dist/bigdl_chronos-*-py3-none-manylinux1_x86_64.whl
          export SPARK_LOCAL_HOSTNAME=localhost
          export KERAS_BACKEND=tensorflow
          bash python/chronos/dev/test/run-installation-options.sh "torch and not distributed and not diff_set_all"
          source deactivate
          conda remove -n chronos-prvn-env -y --all
        env:
          BIGDL_ROOT: ${{ github.workspace }}
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
47 changes: 47 additions & 0 deletions .github/workflows/chronos-prvn-python-spark-3.1-py37.yml
@@ -213,3 +213,50 @@ jobs:
        env:
          BIGDL_ROOT: ${{ github.workspace }}
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}

  chronos-PRVN-py38-test:
    runs-on: [ self-hosted, Gondolin, ubuntu-20.04-lts ]
    strategy:
      fail-fast: false
      matrix:
        python-version: ["3.8"]
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 8
        uses: ./.github/actions/jdk-setup-action
      - name: Set up Maven
        uses: ./.github/actions/maven-setup-action

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}

      - name: Run Chronos python test
        shell: bash
        run: |
          conda remove -n chronos-prvn-env -y --all
          conda create -n chronos-prvn-env -y python==3.8 setuptools==58.0.4 -c ${GONDOLIN_CONDA_CHANNEL} --override-channels
          source activate chronos-prvn-env
          apt-get update
          apt-get install -y libgl1
          apt-get install patchelf
          pip uninstall -y bigdl-friesian bigdl-friesian-spark3 bigdl-dllib bigdl-dllib-spark3 bigdl-orca pyspark bigdl-orca-spark3 bigdl-chronos bigdl-chronos-spark3 bigdl-nano bigdl-friesian bigdl-friesian-spark3
          sed -i "s/pyspark==2.4.6/pyspark==3.1.3/g" python/dllib/src/setup.py
          wget https://raw.githubusercontent.com/analytics-zoo/gha-cicd-env/main/python-requirements/requirements-chronos-python-ut.txt -O ${{ github.workspace }}/requirements-chronos-python-ut.txt
          pip install -i ${GONDOLIN_PIP_MIRROR} --trusted-host ${GONDOLIN_TRUSTED_HOST} -r ${{ github.workspace }}/requirements-chronos-python-ut.txt
          bash python/dev/release_default_linux_spark313.sh default false false
          bash python/nano/dev/build_and_install.sh linux default false pytorch --force-reinstall --no-deps -U
          whl_name=`ls python/nano/dist/`
          pip install -i https://pypi.python.org/simple python/nano/dist/${whl_name}[tensorflow_27,pytorch,inference]
          pip install -i https://pypi.python.org/simple python/dllib/src/dist/bigdl_dllib_*-py3-none-manylinux1_x86_64.whl
          pip install -i https://pypi.python.org/simple python/orca/src/dist/bigdl_orca_*-py3-none-manylinux1_x86_64.whl
          pip install -i https://pypi.python.org/simple python/chronos/src/dist/bigdl_chronos_*-py3-none-manylinux1_x86_64.whl
          export SPARK_LOCAL_HOSTNAME=localhost
          export KERAS_BACKEND=tensorflow
          bash python/chronos/dev/test/run-installation-options.sh "torch and not distributed and not diff_set_all"
          source deactivate
          conda remove -n chronos-prvn-env -y --all
        env:
          BIGDL_ROOT: ${{ github.workspace }}
          ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
@@ -31,7 +31,7 @@ def get_roll_start_idx(df, id_col, window_size):
     id_start_idxes = df.index[df[id_col] != df[id_col].shift(1)].tolist() + [len(df.index)]
     roll_start_idx_iter = ((range(id_start_idxes[i], id_start_idxes[i+1] - window_size + 1))
                            for i in range(len(id_start_idxes) - 1))
-    roll_start_idxes = np.fromiter(itertools.chain.from_iterable(roll_start_idx_iter), np.int)
+    roll_start_idxes = np.fromiter(itertools.chain.from_iterable(roll_start_idx_iter), np.int32)
     return roll_start_idxes


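For context (not part of the diff itself): np.int was an alias for the builtin int, deprecated in NumPy 1.20 and removed in 1.24, which is why the roll-index computation above switches to an explicit fixed-width dtype. A minimal, self-contained sketch of the replacement; the sample iterator here is hypothetical, and on a 64-bit Linux build the old alias resolved to the platform default int64 while np.int32 is explicitly 32-bit:

    import itertools
    import numpy as np

    # Hypothetical stand-in for roll_start_idx_iter from the function above.
    roll_start_idx_iter = (range(i, i + 3) for i in (0, 10))

    # np.int (removed in NumPy 1.24) aliased the builtin int, which fromiter
    # mapped to the platform default integer; np.int32 pins the width explicitly.
    idx = np.fromiter(itertools.chain.from_iterable(roll_start_idx_iter), np.int32)
    print(idx, idx.dtype)  # [ 0  1  2 10 11 12] int32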
16 changes: 8 additions & 8 deletions python/chronos/src/bigdl/chronos/model/tcmf/time.py
@@ -59,45 +59,45 @@ def __init__(self, start_date, num_ts=100, freq="H", normalized=True):
         self.dti = pd.date_range(self.start_date, periods=self.num_ts, freq=self.freq)

     def _minute_of_hour(self):
-        minutes = np.array(self.dti.minute, dtype=np.float)
+        minutes = np.array(self.dti.minute, dtype=np.float32)
         if self.normalized:
             minutes = minutes / 59.0 - 0.5
         return minutes

     def _hour_of_day(self):
-        hours = np.array(self.dti.hour, dtype=np.float)
+        hours = np.array(self.dti.hour, dtype=np.float32)
         if self.normalized:
             hours = hours / 23.0 - 0.5
         return hours

     def _day_of_week(self):
-        dayWeek = np.array(self.dti.dayofweek, dtype=np.float)
+        dayWeek = np.array(self.dti.dayofweek, dtype=np.float32)
         if self.normalized:
             dayWeek = dayWeek / 6.0 - 0.5
         return dayWeek

     def _day_of_month(self):
-        dayMonth = np.array(self.dti.day, dtype=np.float)
+        dayMonth = np.array(self.dti.day, dtype=np.float32)
         if self.normalized:
             dayMonth = dayMonth / 30.0 - 0.5
         return dayMonth

     def _day_of_year(self):
-        dayYear = np.array(self.dti.dayofyear, dtype=np.float)
+        dayYear = np.array(self.dti.dayofyear, dtype=np.float32)
         if self.normalized:
             dayYear = dayYear / 364.0 - 0.5
         return dayYear

     def _month_of_year(self):
-        monthYear = np.array(self.dti.month, dtype=np.float)
+        monthYear = np.array(self.dti.month, dtype=np.float32)
         if self.normalized:
             monthYear = monthYear / 11.0 - 0.5
         return monthYear

     def _week_of_year(self):
-        weekYear = np.array(pd.Int64Index(self.dti.isocalendar().week), dtype=np.float) if\
+        weekYear = np.array(pd.Int64Index(self.dti.isocalendar().week), dtype=np.float32) if\
             version.parse(pd.__version__) >= version.parse("1.1.0") else\
-            np.array(self.dti.weekofyear, dtype=np.float)
+            np.array(self.dti.weekofyear, dtype=np.float32)
         if self.normalized:
             weekYear = weekYear / 51.0 - 0.5
         return weekYear
@@ -71,7 +71,7 @@ def _check_config(self, **config):
                                   f"{lstm_name} should be int or an list/tuple of ints. "
                                   f"Got {config[lstm_name]}")
         if dropout_name in config:
-            if not check_iter_type(config[dropout_name], (float, np.float)):
+            if not check_iter_type(config[dropout_name], (float, np.float32)):
                 invalidInputError(False,
                                   f"{dropout_name} should be float or a list/tuple of floats. "
                                   f"Got {config[dropout_name]}")
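A side note on this particular replacement (my reading, not stated in the commit, and assuming check_iter_type performs an isinstance-style check against the given type tuple): np.float was simply an alias of the builtin float, so (float, np.float) was a redundant pair, whereas (float, np.float32) keeps accepting plain Python floats and np.float64 values (np.float64 subclasses float) and additionally accepts float32 scalars:

    import numpy as np

    type_tuple = (float, np.float32)                  # the new tuple from the diff
    print(isinstance(0.3, type_tuple))                # True, plain Python float
    print(isinstance(np.float64(0.3), type_tuple))    # True, float64 subclasses float
    print(isinstance(np.float32(0.3), type_tuple))    # True, newly accepted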
2 changes: 1 addition & 1 deletion python/chronos/src/setup.py
@@ -62,7 +62,7 @@ def setup_package():
         url='https://github.com/intel-analytics/BigDL',
         packages=get_bigdl_packages(),
         install_requires=['pandas>=1.0.5, <=1.3.5', 'scikit-learn>=0.22.0, <=1.0.2',
-                          'bigdl-nano==' + VERSION, 'numpy<=1.23.5'],
+                          'bigdl-nano==' + VERSION],
         extras_require={'pytorch': ['bigdl-nano[pytorch]==' + VERSION],
                         'tensorflow': ['bigdl-nano[tensorflow_27]=='+VERSION],
                         'automl': ['optuna<=2.10.1', 'configspace<=0.5.0', 'SQLAlchemy<=1.4.27'],
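One plausible reading of the setup.py change above (an assumption, not stated in the commit): with the np.float / np.int aliases replaced throughout the codebase, chronos no longer needs to cap NumPy at 1.23.x, the last release series that still shipped those aliases. A quick compatibility check under a newer NumPy, as a sketch:

    import numpy as np

    # Explicit dtypes behave the same across NumPy releases ...
    arr = np.array([1.0, 2.0], dtype=np.float32)
    print(np.__version__, arr.dtype)   # e.g. "1.24.4 float32"

    # ... while the removed alias raises AttributeError on NumPy >= 1.24.
    try:
        np.array([1.0], dtype=np.float)
    except AttributeError as err:
        print("np.float is gone:", err)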
@@ -40,7 +40,7 @@ def generate_spark_df():
     spark = SparkSession(sc)
     rdd = sc.range(0, 100)
     from pyspark.ml.linalg import DenseVector
-    df = rdd.map(lambda x: (DenseVector(np.random.randn(1, ).astype(np.float)),
+    df = rdd.map(lambda x: (DenseVector(np.random.randn(1, ).astype(np.float32)),
                             int(np.random.randint(0, 2, size=())),
                             int(x))).toDF(["feature", "id", "date"])
     return df
