Added init files
johnjasa committed Aug 25, 2020
1 parent 0bcbd40 commit 608a279
Showing 9 changed files with 16 additions and 14 deletions.
1 change: 0 additions & 1 deletion ROSCO_toolbox/ROSCO_toolbox/sim.py
@@ -10,7 +10,6 @@
 # speROSCO_cific language governing permissions and limitations under the License.
 
 import numpy as np
-from ROSCO_toolbox import turbine as ROSCO_turbine
 import matplotlib.pyplot as plt
 import sys
 
3 changes: 1 addition & 2 deletions ROSCO_toolbox/ROSCO_toolbox/turbine.py
@@ -19,8 +19,6 @@
 import matplotlib.pyplot as plt
 import pandas as pd
 
-from ROSCO_toolbox import utilities as ROSCO_utilities
-
 # Some useful constants
 now = datetime.datetime.now()
 pi = np.pi
@@ -205,6 +203,7 @@ def load_from_fast(self, FAST_InputFile,FAST_directory, FAST_ver='OpenFAST',dev_
         if rot_source == 'cc-blade': # Use cc-blade
             self.load_from_ccblade()
         elif rot_source == 'txt': # Use specified text file
+            from ROSCO_toolbox import utilities as ROSCO_utilities
             file_processing = ROSCO_utilities.FileProcessing()
             self.pitch_initial_rad, self.TSR_initial, self.Cp_table, self.Ct_table, self.Cq_table = file_processing.load_from_txt(
                 txt_filename)
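Note: the turbine.py change above drops the module-level ROSCO_utilities import and re-imports it inside the branch that actually needs it. Deferring a cross-package import to call time like this is the usual way to keep two packages importable independently (and to sidestep circular-import failures) once they are bundled under one repository. A minimal sketch of the pattern, with a hypothetical function name, grounded only in the lines shown above:

    def load_rotor_performance(txt_filename):
        # Deferred import: resolved only when the function runs, after the
        # ROSCO_toolbox package has finished initializing, so importing
        # turbine.py alone no longer pulls in utilities at import time.
        from ROSCO_toolbox import utilities as ROSCO_utilities
        file_processing = ROSCO_utilities.FileProcessing()
        return file_processing.load_from_txt(txt_filename)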
1 change: 1 addition & 0 deletions ROSCO_toolbox/__init__.py
@@ -0,0 +1 @@
+from .ROSCO_toolbox import control_interface, controller, sim, turbine, utilities
1 change: 1 addition & 0 deletions pCrunch/__init__.py
@@ -0,0 +1 @@
+from .pCrunch import Analysis, CaseGen_Control, pdTools, Processing
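These two new __init__.py files are what the commit title refers to: they re-export the nested ROSCO_toolbox.ROSCO_toolbox and pCrunch.pCrunch modules at the top level, so the flat import spellings used throughout the code keep working while both toolboxes live as subdirectories of the weis repository. A short sketch of the usage this preserves (assuming the nested source layout shown in the file paths above):

    # Without the re-export, callers would need the doubled package path:
    #   from ROSCO_toolbox.ROSCO_toolbox import turbine
    # With the new __init__.py files, the flat spelling keeps working:
    from ROSCO_toolbox import turbine
    from pCrunch import Analysis, pdTools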
9 changes: 6 additions & 3 deletions pCrunch/pCrunch/Analysis.py
@@ -10,9 +10,7 @@
 import fatpack # 3rd party module used for rainflow counting
 
 from scipy.interpolate import PchipInterpolator
-from ROSCO_toolbox.utilities import FAST_IO
 
-from pCrunch import pdTools
 
 class Loads_Analysis(object):
     '''
@@ -88,6 +86,7 @@ def full_loads_analysis(self, filenames, get_load_ranking=True, return_FastData=
             list or dictionary containing OpenFAST output data
         '''
         # Load openfast data
+        from ROSCO_toolbox.utilities import FAST_IO
         fast_io = FAST_IO()
         fast_data = fast_io.load_FAST_out(filenames, tmin=self.t0, tmax=self.tf, verbose=self.verbose)
 
@@ -244,6 +243,7 @@ def load_ranking(self, stats, names=[], get_df=False):
             pandas DataFrame containing load rankings
         '''
 
+        from pCrunch import pdTools
         # Make sure stats is in pandas df
         if isinstance(stats, dict):
             stats_df = pdTools.dict2df([stats], names=names)
@@ -468,6 +468,7 @@ def AEP(self, stats, windspeeds, U_pwr_curve=[], pwr_curve_vars=[]):
             Annual energy production corresponding to
         '''
 
+        from pCrunch import pdTools
         # Make sure stats is in pandas df
         if isinstance(stats, dict):
             stats_df = pdTools.dict2df(stats)
@@ -574,6 +575,7 @@ def stat_curve(self, windspeeds, stats, plotvar, plottype, stat_idx=0, names=[])
             ax: axes handle
         '''
 
+        from pCrunch import pdTools
         # Check for valid inputs
         if isinstance(stats, dict):
             stats_df = pdTools.dict2df(stats)
@@ -729,7 +731,8 @@ def plot_load_ranking(self, load_rankings, case_matrix, classifier_type,
         # classifiers = list(set(cmw[classifier_type]))
         # classifier_names = ['ROSCO', 'legacy']
 
-        # Check for valid inputs
+        from pCrunch import pdTools
+        # Check for valid inputs
         if isinstance(load_rankings, dict):
             load_ranking_df = pdTools.dict2df(load_rankings)
         elif isinstance(load_rankings, list):
6 changes: 2 additions & 4 deletions pCrunch/pCrunch/Processing.py
@@ -15,10 +15,6 @@
 import matplotlib.pyplot as plt
 import pandas as pd
 
-from ROSCO_toolbox.utilities import FAST_IO
-
-from pCrunch import Analysis, pdTools
-
 
 class FAST_Processing(object):
     '''
@@ -185,6 +181,7 @@ def batch_processing(self):
 
         else:
             # Initialize Analysis
+            from pCrunch import Analysis, pdTools
             loads_analysis = Analysis.Loads_Analysis()
             loads_analysis.verbose = self.verbose
             loads_analysis.t0 = self.t0
@@ -279,6 +276,7 @@ def design_comparison(self, filenames):
 
         fnames = np.array(filenames).T.tolist()
         # Setup FAST_Analysis preferences
+        from pCrunch import Analysis, pdTools
         loads_analysis = Analysis.Loads_Analysis()
         loads_analysis.verbose=self.verbose
         loads_analysis.t0 = self.t0
2 changes: 1 addition & 1 deletion pCrunch/pCrunch/pdTools.py
@@ -2,7 +2,6 @@
 Some tools to ease management of batch analysis data in pandas
 '''
 import pandas as pd
-from pCrunch import Processing
 
 def dict2df(sumstats, names=None):
     '''
@@ -77,6 +76,7 @@ def yaml2df(filename, names=[]):
     '''
 
+    from pCrunch import Processing
     data_dict = Processing.load_yaml('test.yaml', package=0)
 
     level = data_dict
4 changes: 2 additions & 2 deletions setup.py
@@ -123,8 +123,8 @@ def build_extension(self, ext):
     install_requires = ['openmdao>=3.2','numpy','scipy','pandas','simpy','marmot-agents','nlopt','dill','smt'],
     classifiers = [_f for _f in CLASSIFIERS.split('\n') if _f],
     package_dir = {'wisdem':'WISDEM/wisdem',
-                   'ROSCO_toolbox.ROSCO_toolbox':'ROSCO_toolbox',
-                   'pCrunch.pCrunch':'pCrunch',
+                   # 'ROSCO_toolbox.ROSCO_toolbox':'ROSCO_toolbox',
+                   # 'pCrunch.pCrunch':'pCrunch',
                    }, # weis doesn't need special directions
     packages = weis_pkgs + wisdem_pkgs + roscotools_pkgs + pcrunch_pkgs,
     python_requires = '>=3.6',
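With the re-exporting __init__.py files in place, the nested packages can be declared directly in the packages list, so the package_dir remapping for ROSCO_toolbox and pCrunch is no longer needed and is commented out above. A hedged sketch of the resulting setuptools configuration; the package lists below are illustrative, not the exact contents of this setup.py:

    from setuptools import setup

    setup(
        name="weis",
        packages=[
            "ROSCO_toolbox",                # top-level package with the new __init__.py
            "ROSCO_toolbox.ROSCO_toolbox",  # nested source directory
            "pCrunch",
            "pCrunch.pCrunch",
        ],
        # No package_dir entry needed for these: the directory layout already
        # matches the dotted names (ROSCO_toolbox/ROSCO_toolbox/, pCrunch/pCrunch/).
    )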
3 changes: 2 additions & 1 deletion weis/multifidelity/methods/trust_region.py
@@ -100,11 +100,12 @@ def find_next_point(self):
         upper_bounds = np.minimum(trust_region_upper_bounds, self.bounds[:, 1])
 
         bounds = list(zip(lower_bounds, upper_bounds))
-        scaled_function = lambda x: self.objective_scaler * self.approximation_functions[
+        scaled_function = lambda x: np.squeeze(self.objective_scaler * self.approximation_functions[
             self.objective
         ](
             x
         )
+        )
         res = minimize(
             scaled_function,
             x0,
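The trust_region.py change wraps the scaled objective in np.squeeze. scipy.optimize.minimize expects the objective callable to return a scalar, and a surrogate in approximation_functions that returns a one-element array (shape (1,) or (1, 1)) can trigger errors or deprecation warnings in newer SciPy releases. A small self-contained illustration; the quadratic stand-in for the approximation function is made up:

    import numpy as np
    from scipy.optimize import minimize

    # Stand-in surrogate that returns a one-element array, as fitted
    # approximation models often do (purely illustrative).
    approx = lambda x: np.array([np.sum((x - 1.0) ** 2)])

    objective_scaler = 1.0

    # np.squeeze collapses the (1,)-shaped result to a scalar value,
    # which is what minimize expects from its objective.
    scaled_function = lambda x: np.squeeze(objective_scaler * approx(x))

    res = minimize(scaled_function, x0=np.zeros(2), bounds=[(-5, 5), (-5, 5)])
    print(res.x)  # converges to approximately [1., 1.]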
