@@ -130,27 +130,30 @@ jobs:
       - name : Install DACKAR Required Libraries
         run : |
           echo " Create dackar_libs"
-          conda install python=3.11
+          conda install python=3.12
           echo " Conda information"
           conda info
           echo " Activate Dackar conda environment"
-          pip install spacy==3.5 textacy matplotlib nltk coreferee beautifulsoup4 networkx pysbd tomli numerizer autocorrect pywsd openpyxl quantulum3[classifier] numpy==1.26 scikit-learn pyspellchecker contextualSpellCheck pandas
+          pip install spacy==3.5 stumpy textacy matplotlib nltk coreferee beautifulsoup4 networkx pysbd tomli numerizer autocorrect pywsd openpyxl quantulum3[classifier] numpy==1.26 scikit-learn pyspellchecker contextualSpellCheck pandas
           pip install neo4j jupyterlab
           pip install pytest
-          conda install -c conda-forge stumpy
           conda list
           which python
-
-          # python -m spacy download en_core_web_lg [for some reason, GitHub machine complains this command]
-          # python -m spacy download en_core_web_lg
-          # pip install numba
-      - name : Download trained models
-        run : |
           pip install https://github.com/explosion/spacy-models/releases/download/en_core_web_lg-3.5.0/en_core_web_lg-3.5.0-py3-none-any.whl
           python -m coreferee install en
           python -m nltk.downloader all
           quantulum3-training -s
 
+          # python -m spacy download en_core_web_lg [for some reason, GitHub machine complains this command]
+          # python -m spacy download en_core_web_lg
+          # pip install numba
+      # - name: Download trained models
+      #   run: |
+      #     pip install https://github.com/explosion/spacy-models/releases/download/en_core_web_lg-3.5.0/en_core_web_lg-3.5.0-py3-none-any.whl
+      #     python -m coreferee install en
+      #     python -m nltk.downloader all
+      #     quantulum3-training -s
+
       - name : Test
         run : |
           cd tests