diff --git a/RMTK.ipynb b/RMTK.ipynb
index 45f0d48..96d5ec5 100644
--- a/RMTK.ipynb
+++ b/RMTK.ipynb
@@ -29,14 +29,12 @@
"source": [
"---\n",
"## Vulnerability Module\n",
- "The *Vulnerability Module* of the Risk Modeller's Toolkit comprises a number of tools to generate a large number of numerical models (e.g. single degree of freedom oscillators (SDOF) or capacity curves establishing the relation between spectral acceleration and spectral displacement); for the conversion of results from multiple degree of freedom systems (MDOF) to equivalent single degree ocilators (SDOF) or for the transformation of the elastic displacement to inelastic response, which is fundamental to derive fragility or vulnerability models for single structures or building classes. In the following figure, a fragility model developed using one of algorithms supported by this toolkit is depicted: \n",
+ "The *Vulnerability Module* of the Risk Modeller's Toolkit comprises a number of tools to generate a large number of numerical models (e.g. single degree of freedom oscillators (SDOF) or capacity curves establishing the relation between spectral acceleration and spectral displacement), and to calculate the response of these numerical models against large sets of ground motion records. In the following figure, a fragility model developed using one of algorithms supported by this toolkit is depicted: \n",
"\n",
"\n",
"### Capacity model generator\n",
"This module enables users to generate a large number of numerical models that can be used in the derivation of fragility models. This feature allows the inclusion of the building-to-building variability necessary for the derivation of fragility functions for building classes. To use this feature click [here](notebooks/vulnerability/model_generator/model_generator.ipynb).\n",
"\n",
- "### Conversion from MDOF to SDOF\n",
- "Several structural analysis packages allow the derivation of pushover curves (base shear versus top displacement) for multiple degree of freedom systems (MDOF). This module allows the conversion of results from MDOF analyses into equivalent single degree of freedom (SDOF) models, thus making them compatible with a wide range of non-linear static procedures. To use this feature click [here](notebooks/vulnerability/mdof_to_sdof/mdof_to_sdof.ipynb).\n",
"\n",
"### Derivation of fragility and vulnerability models\n",
"This module comprises a wide spectrum methdologies to assess the nonlinear response of one, or multiple, structures using nonlinear static procedures. One of the main outputs of this module are fragility functions. Fragility functions obtained using this module can be further transformed into vulnerability functions using appropriate damage-to-loss (i.e. consequence) models. To use this feature click [here](notebooks/vulnerability/derivation_fragility/derivation_fragility.ipynb).\n"
@@ -76,7 +74,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
- "version": "3.5.4"
+ "version": "3.6.6"
}
},
"nbformat": 4,
diff --git a/notebooks/vulnerability/derivation_fragility/NLTHA_on_SDOF/NLTHA_on_SDOF.ipynb b/notebooks/vulnerability/derivation_fragility/NLTHA_on_SDOF/NLTHA_on_SDOF.ipynb
index 75c2936..f7489dd 100644
--- a/notebooks/vulnerability/derivation_fragility/NLTHA_on_SDOF/NLTHA_on_SDOF.ipynb
+++ b/notebooks/vulnerability/derivation_fragility/NLTHA_on_SDOF/NLTHA_on_SDOF.ipynb
@@ -2,12 +2,9 @@
"cells": [
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
- "# Non-Linear Time History Analysis (NLTHA) for Single Degree of Freedom (SDOF) Oscillators\n",
+ "# Non-Linear Time History Analysis (NLTHA) on Single Degree of Freedom (SDOF) Oscillators\n",
"\n",
"In this method, a single degree of freedom (SDOF) model of each structure is subjected to non-linear time history analysis (NLTHA) using a suite of ground motion records. The displacements of the SDOF due to each ground motion record are used as input to determine the distribution of buildings in each damage state for each level of ground motion intensity. A regression algorithm is then applied to derive the fragility model.\n",
"\n",
@@ -23,13 +20,9 @@
},
{
"cell_type": "code",
- "execution_count": null,
- "metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true
- },
- "outputs": [],
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [
+ {
+ "ename": "SyntaxError",
+ "evalue": "invalid syntax (utils.py, line 434)",
+ "output_type": "error",
+ "traceback": [
+ "Traceback \u001b[0;36m(most recent call last)\u001b[0m:\n",
+ " File \u001b[1;32m\"/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/IPython/core/interactiveshell.py\"\u001b[0m, line \u001b[1;32m2961\u001b[0m, in \u001b[1;35mrun_code\u001b[0m\n exec(code_obj, self.user_global_ns, self.user_ns)\n",
+ "\u001b[0;36m File \u001b[0;32m\"\"\u001b[0;36m, line \u001b[0;32m1\u001b[0;36m, in \u001b[0;35m\u001b[0;36m\u001b[0m\n\u001b[0;31m from rmtk.vulnerability.common import utils\u001b[0m\n",
+ "\u001b[0;36m File \u001b[0;32m\"/Users/vitorsilva/Dropbox/GEM/git/gem/rmtk/rmtk/vulnerability/common/utils.py\"\u001b[0;36m, line \u001b[0;32m434\u001b[0m\n\u001b[0;31m print str((it+1)*100/len(T))+'%'\u001b[0m\n\u001b[0m ^\u001b[0m\n\u001b[0;31mSyntaxError\u001b[0m\u001b[0;31m:\u001b[0m invalid syntax\n"
+ ]
+ }
+ ],
"source": [
"from rmtk.vulnerability.common import utils\n",
"from rmtk.vulnerability.derivation_fragility.NLTHA_on_SDOF import NLTHA_on_SDOF\n",
@@ -39,11 +44,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"---\n",
"### Load capacity curves\n",
@@ -56,11 +57,7 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"outputs": [],
"source": [
"capacity_curves_file = \"../../../../../rmtk_data/capacity_curves_point.csv\"\n",
@@ -72,9 +69,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true,
"scrolled": false
},
"outputs": [],
@@ -87,10 +81,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Load ground motion records\n",
"\n",
@@ -105,9 +96,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true,
"scrolled": false
},
"outputs": [],
@@ -120,10 +108,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Load damage state thresholds\n",
"\n",
@@ -135,11 +120,7 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"outputs": [],
"source": [
"damage_model_file = \"../../../../../rmtk_data/damage_model.csv\"\n",
@@ -148,10 +129,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Obtain the damage probability matrix\n",
"\n",
@@ -164,9 +142,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true,
"scrolled": false
},
"outputs": [],
@@ -179,10 +154,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Find an adequate intensity measure\n",
"\n",
@@ -193,9 +165,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true,
"scrolled": false
},
"outputs": [],
@@ -207,10 +176,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Fit lognormal CDF fragility curves\n",
"\n",
@@ -224,9 +190,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true,
"scrolled": false
},
"outputs": [],
@@ -239,10 +202,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Plot fragility functions\n",
"\n",
@@ -253,11 +213,7 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"outputs": [],
"source": [
"minIML, maxIML = 0.0, 1.2\n",
@@ -267,10 +223,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Save fragility functions\n",
"\n",
@@ -283,11 +236,7 @@
{
"cell_type": "code",
"execution_count": null,
- "metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"outputs": [],
"source": [
"taxonomy = \"RC\"\n",
@@ -300,9 +249,7 @@
{
"cell_type": "markdown",
"metadata": {
- "collapsed": true,
- "deletable": true,
- "editable": true
+ "collapsed": true
},
"source": [
"### Obtain vulnerability function\n",
@@ -319,9 +266,7 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": true,
- "deletable": true,
- "editable": true
+ "collapsed": true
},
"outputs": [],
"source": [
@@ -337,10 +282,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Plot vulnerability function"
]
@@ -349,9 +291,6 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true,
"scrolled": true
},
"outputs": [],
@@ -361,10 +300,7 @@
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"### Save vulnerability function\n",
"\n",
@@ -377,9 +313,7 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": true,
- "deletable": true,
- "editable": true
+ "collapsed": true
},
"outputs": [],
"source": [
@@ -392,23 +326,23 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 2",
+ "display_name": "Python 3",
"language": "python",
- "name": "python2"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
- "version": 2
+ "version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.13"
+ "pygments_lexer": "ipython3",
+ "version": "3.6.6"
}
},
"nbformat": 4,
- "nbformat_minor": 0
+ "nbformat_minor": 1
}
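The NLTHA_on_SDOF notebook above describes the core loop: peak SDOF displacements per record populate a damage probability matrix, and a regression then yields lognormal fragility curves. A self-contained sketch of that fit with synthetic data (the thresholds, record set, and least-squares regression below are placeholders; the toolkit's own regression options may differ):

```python
# Sketch: bin peak NLTHA displacements into damage states and fit a
# lognormal CDF to the exceedance fraction versus intensity level.
import numpy as np
from scipy.stats import norm
from scipy.optimize import curve_fit

limit_states = [0.02, 0.05, 0.10]          # damage thresholds [m] (assumed)
iml = np.linspace(0.05, 1.2, 24)           # intensity levels, e.g. Sa(T) [g]
rng = np.random.default_rng(0)
# Synthetic peak displacements: 50 ground motion records per intensity level.
disp = 0.08 * iml[:, None] * np.exp(rng.normal(0.0, 0.6, (iml.size, 50)))

def lognorm_cdf(x, mu, beta):
    return norm.cdf((np.log(x) - mu) / beta)

for ds, threshold in enumerate(limit_states, start=1):
    frac = (disp >= threshold).mean(axis=1)         # fraction exceeding the DS
    (mu, beta), _ = curve_fit(lognorm_cdf, iml, frac, p0=(0.0, 0.5))
    print('DS%d: median=%.3f g, beta=%.2f' % (ds, np.exp(mu), beta))
```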
diff --git a/notebooks/vulnerability/derivation_fragility/derivation_fragility.ipynb b/notebooks/vulnerability/derivation_fragility/derivation_fragility.ipynb
index 2128d4b..eed9b2a 100644
--- a/notebooks/vulnerability/derivation_fragility/derivation_fragility.ipynb
+++ b/notebooks/vulnerability/derivation_fragility/derivation_fragility.ipynb
@@ -2,81 +2,16 @@
"cells": [
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"# Derivation of fragility and vulnerability models\n",
"\n",
- "This module comprises a wide range of methodologies for the assessment of the nonlinear response of one or multiple structures using nonlinear static procedures. One of the main outputs of this module are fragility functions, which can be converted into vulnerability functions by employing a suitable damage-to-loss (i.e. consequence) model.\n",
- "\n",
- "**Note**: It is important to mention that the majority of the methodologies implemented in this module were not originally developed in order to derive fragility models, but rather for the assessment of the nonlinear structural response of individual structures. Therefore, their employment in the derivation of fragility models needs to be undertaken with due care, recognizing the inherent limitations of this approach.\n",
- "\n",
- "The figure below illustrates the different methodologies for the derivation of fragility functions implemented in this module.\n",
- "\n",
- ""
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "collapsed": false,
- "deletable": true,
- "editable": true
- },
- "source": [
- "---\n",
- "## 1 — Direct Nonlinear Static Procedures\n",
- "\n",
- "These methodologies estimate the median seismic intensity values corresponding to the attainment of the damage state of interest, taking into account record-to-record dispersion and dispersion in the damage state thresholds. The inelastic displacements are estimated from the maximum elastic displacements by means of pre-computed $R-\\mu-T$ relationships.\n",
- "\n",
- "### 1.1 - SPO2IDA (2006)\n",
- "This methodology uses the SPO2IDA tool (Vamvatsikos and Cornell, 2006) to convert static pushover curves into $16\\%$, $50\\%$, and $84\\%$ IDA curves. The procedure is applicable to any kind of multi-linear capacity curve and it is suitable for single-building fragility curve estimation. Individual fragility curves can later be combined into a single fragility curve that considers the inter-building uncertainty. To use this feature click [here](../../vulnerability/derivation_fragility/R_mu_T_dispersion/SPO2IDA/spo2ida.ipynb).\n",
- "\n",
- "### 1.2 - Ruiz-García and Miranda (2007)\n",
- "This methodology makes use of the work by Ruiz-García and Miranda (2007), where the inelastic displacement demand is related to the elastic displacement by a simple relationship. This procedure is suitable for single-building fragility curve estimation and is applicable to bilinear elasto-plastic capacity curves. Individual fragility curves can later be combined into a single fragility curve that considers the inter-building uncertainty. To use this feature click [here](../../vulnerability/derivation_fragility/R_mu_T_dispersion/ruiz_garcia_miranda/ruiz-garcia_miranda.ipynb).\n",
- "\n",
- "### 1.3 - Dolsek and Fajfar (2004)\n",
- "This methodology makes use of work by Dolsek and Fajfar (2004) to estimate the inelastic displacement of a SDOF system based on its elastic displacement and the proposed $R-\\mu-T$ relationship. Record-to-record dispersion from Ruiz-García and Miranda (2007) can be included in the derivation of fragility curves. It is suitable for single-building fragility curve estimation and is applicable to any kind of multi-linear capacity curves. Individual fragility curves can be later combined into a single fragility curve that considers inter-building uncertainty. To use this feature click [here](../../vulnerability/derivation_fragility/R_mu_T_no_dispersion/dolsek_fajfar/dolsek_fajfar.ipynb)."
+ "This module comprises methodologies for the assessment of the nonlinear response of one or multiple structures using nonlinear dynamic analysis. One of the main outputs of this module are fragility functions, which can be converted into vulnerability functions by employing a suitable damage-to-loss (i.e. consequence) model.\n"
]
},
{
"cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
- "source": [
- "---\n",
- "## 2 — Record-based Nonlinear Static Procedures\n",
- "Record-based Nonlinear Static Procedures allow the User to input its own set of accelograms and estimate for each of them the inelastic target displacement of the structure. This can be achieved either multiplying the elastic spectral displacement by a displacement modification factor or by considering an equivalent linear system.\n",
- "\n",
- "### 2.1 - Vidic, Fajfar and Fischinger (1994)\n",
- "This procedure aims to determine the displacements from an inelastic design spectra for systems with a given ductility factor. The inelastic displacement spectra is determined by means of applying a reduction factor, which depends on the natural period of the system, its ductility factor, the hysteretic behaviour, the damping, and the frequency content of the ground motion. To use this feature click [here](../../vulnerability/derivation_fragility/equivalent_linearization/vidic_etal_1994/vidic_etal_1994.ipynb).\n",
- "\n",
- "### 2.2 - Miranda (2000) for firm soils \n",
- "This methodology aims to estimate the maximum lateral inelastic displacement demands on a structure based on the maximum lateral elastic displacement demands for sites with average shear-wave velocities higher than $180~m/s$. A reduction factor based on the displacement ductility ratio and the period of vibration of the system is used to estimate the inelastic displacements, which are then used as inputs to build the fragility model. To use this feature click [here](../../vulnerability/derivation_fragility/equivalent_linearization/miranda_2000_firm_soils/miranda_2000_firm_soils.ipynb).\n",
- "\n",
- "### 2.3 - Lin and Miranda (2008)\n",
- "This method estimates the maximum inelastic displacement of an existing structure based on the maximum elastic displacement response of its equivalent linear system without the need of iterations, based on the strength ratio. The equivalent linear system has a longer period of vibration and a higher viscous damping than the original system. The estimation of these parameters is based on the strength ratio $R$. To use this feature click [here](../../vulnerability/derivation_fragility/equivalent_linearization/lin_miranda_2008/lin_miranda_2008.ipynb).\n",
- "\n",
- "### 2.4 - Capacity Spectrum Method - FEMA-440 (2005)\n",
- "The Capacity Spectrum Method (CSM) is a procedure capable of estimating the nonlinear response of structures, utilizing overdamped response spectra. These response spectra can either be obtained from a building code or derived based on ground motion records. In this fragility method, the CSM is employed to test building portfolios against a set of ground motion records. To use this feature click [here](../../vulnerability/derivation_fragility/hybrid_methods/CSM/CSM.ipynb).\n",
- "\n",
- "### 2.5 - N2 - Eurocode 8, CEN (2005)\n",
- "This simplified nonlinear procedure for the estimation of the seismic response of structures uses capacity curves and inelastic spectra. This method has been developed to be used in combination with code-based response spectra, but it is also possible to employ it for the assessment of structural response subject to ground motion records. It also has the distinct aspect of assuming an elastic-perfectly plastic force-displacement relationship in the construction of the bilinear curve. To use this feature click [here](../../vulnerability/derivation_fragility/hybrid_methods/N2/N2.ipynb).\n",
- "\n",
- "### 2.6 - Displacement-based Earthquake Loss Assessment - Silva et al. (2013)\n",
- "In this fragility method, thousands of synthetic buildings can be produced considering probabilistic distributions for the variability in the geometrical and material properties. The nonlinear capacity can be estimated using the displacement-based earthquake loss assessment theory. In this process, the performance against a large set of ground motion records is been calculated. Global limit states are used to estimate the distribution of buildings in each damage state for different levels of ground motion, and a regression algorithm is applied to derive fragility functions for each limit state. To use this feature click [here](../../vulnerability/derivation_fragility/hybrid_methods/DBELA/DBELA.ipynb)."
- ]
- },
- {
- "cell_type": "markdown",
- "metadata": {
- "deletable": true,
- "editable": true
- },
+ "metadata": {},
"source": [
"---\n",
"## 3 — Nonlinear Time History Analysis of SDOF Oscillators\n",
@@ -98,9 +33,7 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
- "collapsed": true,
- "deletable": true,
- "editable": true
+ "collapsed": true
},
"outputs": [],
"source": []
@@ -108,23 +41,23 @@
],
"metadata": {
"kernelspec": {
- "display_name": "Python 2",
+ "display_name": "Python 3",
"language": "python",
- "name": "python2"
+ "name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
- "version": 2
+ "version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
- "pygments_lexer": "ipython2",
- "version": "2.7.13"
+ "pygments_lexer": "ipython3",
+ "version": "3.6.6"
}
},
"nbformat": 4,
- "nbformat_minor": 0
+ "nbformat_minor": 1
}
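The derivation_fragility index above notes that fragility functions can be converted into vulnerability functions through a damage-to-loss (consequence) model. A minimal sketch of that conversion, with illustrative fragility parameters and loss ratios (none of these numbers come from the toolkit):

```python
# Sketch: mean loss ratio per intensity level = damage-state probabilities
# weighted by an assumed damage-to-loss (consequence) model.
import numpy as np
from scipy.stats import norm

medians = [0.15, 0.35, 0.70]     # lognormal medians per damage state [g] (assumed)
betas = [0.5, 0.5, 0.5]          # lognormal dispersions (assumed)
loss_ratios = [0.1, 0.4, 1.0]    # assumed consequence model

iml = np.linspace(0.05, 1.2, 50)
# P(DS >= ds | iml) for each damage state
poe = np.array([norm.cdf(np.log(iml / m) / b) for m, b in zip(medians, betas)])
# P(DS == ds | iml): difference of consecutive exceedance curves
prob_ds = poe - np.vstack([poe[1:], np.zeros_like(iml)])
mean_loss_ratio = (np.array(loss_ratios)[:, None] * prob_ds).sum(axis=0)
print('loss ratio at %.2f g: %.3f' % (iml[-1], mean_loss_ratio[-1]))
```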
diff --git a/rmtk/vulnerability/common/utils.py b/rmtk/vulnerability/common/utils.py
index 671c62b..e4899f6 100644
--- a/rmtk/vulnerability/common/utils.py
+++ b/rmtk/vulnerability/common/utils.py
@@ -14,191 +14,7 @@ def read_capacity_curves(input_file):
with open(input_file, 'rU') as f:
data = csv.reader(f)
- for line in data:
- if line[0] == 'Vb-droof' and line[1] == 'TRUE':
- capacity_curves = read_Vbdroof_capacity_curves(data)
- break
- elif line[0] == 'Vb-dfloor' and line[1] == 'TRUE':
- capacity_curves = read_Vbdfloor_capacity_curves(data)
- break
- elif line[0] == 'Sd-Sa' and line[1] == 'TRUE':
- capacity_curves = read_SdSa_capacity_curves(data)
- break
-
- return capacity_curves
-
-def read_Vbdfloor_capacity_curves(data):
-
- # This function reads Vb-floor displacements type of capacity curves
- idealised = 'FALSE'
- periods = []
- ground_heights = []
- regular_heights = []
- gammas = []
- no_storeys = []
- weights = []
- Vb = []
- d_floor = []
- d_roof = []
- id_floor = []
- M_star = []
-
- for line in data:
- if line[0] == 'Idealised':
- idealised = line[1]
- if line[0] == 'Periods [s]':
- for value in line[1:]:
- if isNumber(value):
- periods.append(float(value))
- if line[0] == 'Ground heights [m]':
- for value in line[1:]:
- if isNumber(value):
- ground_heights.append(float(value))
- if line[0] == 'Regular heights [m]':
- for value in line[1:]:
- if isNumber(value):
- regular_heights.append(float(value))
- if line[0] == 'Gamma participation factors':
- for value in line[1:]:
- if isNumber(value):
- gammas.append(float(value))
- if line[0] == 'Effective modal masses [ton]':
- for value in line[1:]:
- if isNumber(value):
- M_star.append(float(value))
- if line[0] == 'Number storeys':
- for value in line[1:]:
- if isNumber(value):
- no_storeys.append(int(value))
- if line[0] == 'Weights':
- for value in line[1:]:
- if isNumber(value):
- weights.append(float(value))
- if line[0][0:6] == 'dfloor':
- subd_floor = []
- for value in line[1:]:
- if isNumber(value):
- subd_floor.append(float(value))
- id_floor.append(subd_floor)
- if len(id_floor) == no_storeys[-1]:
- d_floor.append(id_floor)
- d_roof.append(subd_floor)
- id_floor = []
- if line[0][0:2] == 'Vb':
- subVb = []
- for value in line[1:]:
- if isNumber(value):
- subVb.append(float(value))
- Vb.append(subVb)
-
- if not weights:
- weights = np.ones_like(periods)
- average_period = np.average(np.array(periods), weights=np.array(weights))
-
- # Store all the data in the dictionary
- capacity_curves = {'type': None, 'idealised': None, 'periods': None,
- 'mean_period': None, 'ground_heights': None,
- 'regular_heights': None, 'gamma': None, 'modal_mass': None,
- 'no_storeys': None, 'weight': None, 'dfloor': None,
- 'droof': None, 'Vb': None}
-
- capacity_curves['type'] = 'Vb-dfloor'
- capacity_curves['idealised'] = idealised[0]
- capacity_curves['periods'] = periods
- capacity_curves['mean_period'] = average_period
- capacity_curves['ground_heights'] = ground_heights
- capacity_curves['regular_heights'] = regular_heights
- capacity_curves['gamma'] = gammas
- capacity_curves['modal_mass'] = M_star
- capacity_curves['no_storeys'] = no_storeys
- capacity_curves['weights'] = weights
- capacity_curves['dfloor'] = d_floor
- capacity_curves['droof'] = d_roof
- capacity_curves['Vb'] = Vb
-
- return capacity_curves
-
-def read_Vbdroof_capacity_curves(data):
-
- # This function reads Vb-droof type of capacity curves
- idealised = 'FALSE'
- periods = []
- ground_heights = []
- regular_heights = []
- gammas = []
- no_storeys = []
- weights = []
- Vb = []
- d_roof = []
- M_star = []
-
- for line in data:
- if line[0] == 'Idealised':
- idealised = line[1]
- if line[0] == 'Periods [s]':
- for value in line[1:]:
- if isNumber(value):
- periods.append(float(value))
- if line[0] == 'Ground heights [m]':
- for value in line[1:]:
- if isNumber(value):
- ground_heights.append(float(value))
- if line[0] == 'Regular heights [m]':
- for value in line[1:]:
- if isNumber(value):
- regular_heights.append(float(value))
- if line[0] == 'Gamma participation factors':
- for value in line[1:]:
- if isNumber(value):
- gammas.append(float(value))
- if line[0] == 'Effective modal masses [ton]':
- for value in line[1:]:
- if isNumber(value):
- M_star.append(float(value))
- if line[0] == 'Number storeys':
- for value in line[1:]:
- if isNumber(value):
- no_storeys.append(int(value))
- if line[0] == 'Weights':
- for value in line[1:]:
- if isNumber(value):
- weights.append(float(value))
- if line[0][0:5] == 'droof':
- subd_roof = []
- for value in line[1:]:
- if isNumber(value):
- subd_roof.append(float(value))
- d_roof.append(subd_roof)
- if line[0][0:2] == 'Vb' and isNumber(line[0][2]):
- subVb = []
- for value in line[1:]:
- if isNumber(value):
- subVb.append(float(value))
- Vb.append(subVb)
-
- if not weights:
- weights = np.ones_like(periods)
- average_period = np.average(np.array(periods), weights = np.array(weights))
-
- # Store all the data in the dictionary
- capacity_curves = {'type': None, 'idealised': None, 'periods': None,
- 'ground_heights': None, 'regular_heights': None,
- 'gamma': None, 'no_storeys': None, 'modal_mass': None,
- 'weights': None,
- 'droof': None, 'Vb': None}
-
- capacity_curves['type'] = 'Vb-droof'
- capacity_curves['idealised'] = idealised
- capacity_curves['periods'] = periods
- capacity_curves['mean_period'] = average_period
- capacity_curves['ground_heights'] = ground_heights
- capacity_curves['regular_heights'] = regular_heights
- capacity_curves['gamma'] = gammas
- capacity_curves['modal_mass'] = M_star
- capacity_curves['no_storeys'] = no_storeys
- capacity_curves['weights'] = weights
- capacity_curves['droof'] = d_roof
- capacity_curves['Vb'] = Vb
+ capacity_curves = read_SdSa_capacity_curves(data)
return capacity_curves
@@ -337,55 +153,6 @@ def plot_capacity_curves(capacity_curves):
plt.legend(loc = 'lower right', frameon = False)
plt.show()
-def plot_idealised_capacity(idealised_capacity, capacity_curves, idealised_type):
- #This function plots the capacity curves
- no_capacity_curves = len(capacity_curves['periods'])
- if idealised_type == 'bilinear':
- for icc in range(no_capacity_curves):
-
- if capacity_curves['type']== 'Vb-droof' or capacity_curves['type']== 'Vb-dfloor':
- droof = capacity_curves['droof'][icc]
- Vb = capacity_curves['Vb'][icc]
- else:
- droof = capacity_curves['Sd'][icc]
- Vb = capacity_curves['Sa'][icc]
-
- Vb_idealised = [0, idealised_capacity[icc][2], idealised_capacity[icc][2]]
- droof_idealised = [0, idealised_capacity[icc][0], idealised_capacity[icc][1]]
- plt.plot(droof, Vb, color = 'g', linewidth = 2)
- plt.plot(droof_idealised, Vb_idealised, color = 'r', linewidth = 2)
-
- plt.plot(droof, Vb, color = 'g', linewidth = 2, label = 'capacity curve')
- plt.plot(droof_idealised, Vb_idealised, color = 'r', linewidth = 2,
- label = 'idealised capacity curve')
- plt.xlabel('Roof displacement [m]', fontsize = 10)
- plt.ylabel('Base shear [kN]', fontsize = 10)
-
- else:
- for icc in range(no_capacity_curves):
- if capacity_curves['type']== 'Vb-droof' or capacity_curves['type']== 'Vb-dfloor':
- droof = capacity_curves['droof'][icc]
- Vb = capacity_curves['Vb'][icc]
- else:
- droof = capacity_curves['Sd'][icc]
- Vb = capacity_curves['Sa'][icc]
- Vb_idealised = idealised_capacity[icc][4:]
- Vb_idealised.insert(0, 0)
- Vb_idealised.append(idealised_capacity[icc][-1])
- droof_idealised = idealised_capacity[icc][0:4]
- droof_idealised.insert(0, 0)
- plt.plot(droof, Vb, color = 'g', linewidth = 2)
- plt.plot(droof_idealised, Vb_idealised, color = 'r', linewidth = 2)
-
- plt.plot(droof, Vb, color = 'g', linewidth = 2, label = 'capacity curve')
- plt.plot(droof_idealised, Vb_idealised, color = 'r', linewidth = 2,
- label = 'idealised capacity curve')
- plt.xlabel('Roof displacement [m]', fontsize = 10)
- plt.ylabel('Base shear [kN]', fontsize = 10)
-
- plt.suptitle('Capacity curves')
- plt.legend(loc = 'lower right', frameon = False)
- plt.show()
def read_gmrs(folder):
@@ -431,13 +198,13 @@ def evaluate_optimal_IM(gmrs,PDM,minT,maxT,stepT,damage_model,damping_ratio,meth
T = np.arange(minT, maxT,stepT)
setRsquare = []
for it in range(len(T)):
- print str((it+1)*100/len(T))+'%'
+ print(str((it+1)*100//len(T))+'%')  # '//' preserves the Python 2 integer-percentage output
fragility_model = calculate_mean_fragility(gmrs, PDM, T[it], damping_ratio, 'Sa', damage_model, method)
setRsquare.append(fragility_model['Rsquare'])
meanRsquare = np.mean(np.array(setRsquare),axis=1)
bestT = T[np.argmax(meanRsquare)]
- print 'The best damage-intensity correlation was achieved for Sa at T='+str(bestT)+'s'
+ print('The best damage-intensity correlation was achieved for Sa at T='+str(bestT)+'s')
plot_correlation(T,setRsquare,meanRsquare,damage_model,bestT)
def plot_correlation(T,setRsquare,meanRsquare,damage_model,bestT):
@@ -597,7 +364,7 @@ def read_damage_model(input_file):
line = data[iline+2]
damage_states.append(line[0])
if type_criteria == 'capacity curve dependent':
- print line
+ print(line)
type_damage_state.append(line[1])
distribution.append(line[2])
if isNumber(line[3]):
@@ -685,7 +452,7 @@ def define_limit_states(capacity_curves, icc, damage_model):
if Sdlim>duf: Sdlim = duf
limit_states.append(Sdlim)
- #print limit_states
+ #print(limit_states)
return limit_states
def define_limit_state(Sd, Sa, Sdy, Say, type_damage_state, distribution, mean, cov):
@@ -742,42 +509,6 @@ def sample_value(distribution, mean, cov, A, B):
return result
-def find_intersection(list1, list2, plot_flag):
-
- line1 = []
- for i in range(len(list1[0])):
- line1.append([list1[0][i], list1[1][i]])
-
- line2 = []
- for i in range(len(list2[0])):
- line2.append([list2[0][i], list2[1][i]])
-
- curve1 = LineString(line1)
- curve2 = LineString(line2)
- intersection = curve1.intersection(curve2)
-
- Sdi = []
- Sai = []
- if not intersection.is_empty:
- if isinstance(intersection, Point):
- Sdi.append(intersection.x)
- Sai.append(intersection.y)
- elif isinstance(intersection, MultiPoint):
- for points in intersection:
- coords = points.coords
- for xy in coords:
- Sdi.append(xy[0])
- Sai.append(xy[1])
-
- if plot_flag:
- plt.plot(list1[0], list1[1], color = 'r', linewidth = 2)
- plt.plot(list2[0], list2[1], color = 'b', linewidth = 2)
- plt.xlabel('Spectral displacement', fontsize = 10)
- plt.ylabel('Spectral acceleration', fontsize = 10)
- plt.plot(Sdi, Sai, 'ro', color = 'y')
- plt.show()
-
- return Sdi, Sai
def spread(array, no_steps):
@@ -796,7 +527,7 @@ def spread(array, no_steps):
def allocate_damage(igmr, PDM, disp, limitStates):
no_ls = len(limitStates)
- #print PDM[igmr, :]
+ #print(PDM[igmr, :])
PDM[igmr, 0] = PDM[igmr, 0]+1
ds = 0
for ils in range(no_ls):
@@ -806,8 +537,8 @@ def allocate_damage(igmr, PDM, disp, limitStates):
PDM[igmr, 0] = PDM[igmr, 0]-1
break
- #print disp
- #print PDM[igmr, :]
+ #print(disp)
+ #print(PDM[igmr, :])
return PDM, ds
def residuals(coeffs, y, x):
@@ -1170,55 +901,7 @@ def import_result(input_file):
result = np.genfromtxt(input_file,delimiter=',')
return result
-
-def output_conversions(fragility_model, output_type):
- fragility_model_converted = []
-
- if output_type == 'median-dispersion':
- for iDS in range(len(fragility_model)):
- median = np.exp(fragility_model[iDS][0][0])
- sigma = fragility_model[iDS][0][1]
- fragility_model_converted.append([[median, sigma], fragility_model[iDS][1]])
-
- outputs = ['median', 'dispersion']
-
- elif output_type == 'logmean-cov':
- for iDS in range(len(fragility_model)):
- sigma = fragility_model[iDS][0][1]
- median = np.exp(fragility_model[iDS][0][0])
- log_mean = median*np.exp(np.power(sigma, 2)/2)
- log_st_dev = np.sqrt((np.exp(np.power(sigma, 2))-1)*np.exp(2*np.log(median)+np.power(sigma, 2)))
- cov = log_st_dev/log_mean
- fragility_model_converted.append([[log_mean, cov], fragility_model[iDS][1]])
-
- outputs = ['mean of x', 'cov of x']
-
- elif output_type == 'mean-sigma':
- fragility_model_converted = fragility_model
- outputs = ['mean of ln(x)', 'st. dev. of ln(x)']
-
- return [fragility_model_converted, outputs]
-
-#def plot_fragility(fragility_model, minIML, maxIML):
-#
-# imls = np.linspace(minIML, maxIML, 100)
-# txt = []
-# colours = ['y', 'g', 'c', 'b', 'r', 'm', 'k']
-# for iDS in range(0, len(fragility_model[0])):
-# mu = fragility_model[0][iDS]
-# sigma = fragility_model[1][iDS]
-# txt.append('DS '+str(iDS+1))
-# if sigma <= 0:
-# y = np.zeros_like(imls)
-# y[imls>np.exp(mu)] = 1
-# else:
-# y = norm(mu, sigma).cdf(np.log(imls))
-# plt.plot(imls, y, color = colours[iDS], linewidth = 2)
-#
-# plt.xlabel('Spectral acceleration at T elastic, Sa(Tel) [g]', fontsize = 12)
-# plt.ylabel('Probabilty of Exceedance', fontsize = 12)
-# plt.suptitle('Fragility Curves', fontsize = 12)
-# plt.show()
+
def NigamJennings(time, acc, periods, damping):
@@ -1335,163 +1018,6 @@ def residual_lognormal_dist(coeffs, imls, fractions):
return residual
-def add_information(capacity_curves, attribute, type, data):
- #FIXME: type is a reserved keyword, change to something else
- no_capacity_curves = len(capacity_curves['Sa'])
-
- if attribute == 'heights' or attribute == 'periods' or attribute == 'gamma':
- if type == 'value':
- values = []
- for icc in range(no_capacity_curves):
- values.append(data)
- capacity_curves[attribute] = values
- elif type == 'vector':
- capacity_curves[attribute] = data
- elif type == 'calculate' and attribute == 'periods':
- periods = []
- for icc in range(no_capacity_curves):
- Sd = capacity_curves['Sd'][icc][data]
- Sa = capacity_curves['Sa'][icc][data]
- periods.append(2*math.pi*math.sqrt(Sd/(Sa*9.81)))
- capacity_curves[attribute] = periods
-
- elif attribute == 'yielding point':
- Sdy = []
- Say = []
- for icc in range(no_capacity_curves):
- Sdy.append(capacity_curves['Sd'][icc][data])
- Say.append(capacity_curves['Sa'][icc][data])
- capacity_curves['Sdy'] = Sdy
- capacity_curves['Say'] = Say
- else:
- print attribute + ' is not a recognized attribute. No information was added.'
-
- return capacity_curves
-
-
-def idealisation(typ, capacity_curves):
- idealised_capacity = []
- no_capacity_curves = len(capacity_curves['periods'])
- for icc in range(no_capacity_curves):
- if capacity_curves['type']== 'Vb-droof' or capacity_curves['type']== 'Vb-dfloor':
- droof = capacity_curves['droof'][icc]
- Vb = capacity_curves['Vb'][icc]
- else:
- droof = capacity_curves['Sd'][icc]
- Vb = capacity_curves['Sa'][icc]
- capacity_curves['idealised'] = 'FALSE'
- if typ == 'bilinear':
- idealised_capacity.append(bilinear(droof, Vb, capacity_curves['idealised']))
- elif typ == 'quadrilinear':
- idealised_capacity.append(quadrilinear(droof, Vb, capacity_curves['idealised']))
-
- return idealised_capacity
-
-def bilinear(droof, Vb, idealised):
-# FEMA method
- if idealised == 'FALSE':
- droof = np.array(droof)
- Fy = np.max(Vb)
- du = np.max(droof)
- for index, item in enumerate(Vb):
- if item >= Fy:
- break
- Ay = 0.6*Fy
- Ax = np.interp(Ay, Vb[0:index], droof[0:index])
- slp = Ay/Ax
- dy = Fy/slp
- else:
- dy = droof[1]
- du = droof[2]
- Fy = Vb[1]
-
- return [dy, du, Fy]
-
-def quadrilinear(droof, Vb, idealised):
- if idealised == 'FALSE':
- droof = np.array(droof)
- Fmax = np.max(Vb)
- for index, item in enumerate(Vb):
- if item >= Fmax:
- break
- fmax = index
- dmax = droof[index]
-
- # Yielding point:
- # Vulnerability guidelines method
- # Find yielding displacement with equal energy principle n the interval from 0 to Dmax
- Areas = np.array([(Vb[i+1]+Vb[i])/2 for i in range(0, fmax)])
- dd = np.array([droof[i+1]-droof[i] for i in range(0, fmax)])
- Edmax = np.sum(dd*Areas) #Area under the pushover curve in the interval from 0 to Dmax
- dy = 2*(dmax-Edmax/Fmax)
- Fy = Fmax
-
- # Onset of plateu
- # Find were slope of pushover curve before decreasing in the plateu
- Vb_norm = Vb[fmax::]/Fy
- d_norm = droof[fmax::]/dy
- slp = [(Vb_norm[i]-Vb_norm[i-1])/(d_norm[i]-d_norm[i-1]) for i in xrange(1, len(Vb_norm))]
- indy_soft = np.nonzero(abs(np.array(slp))>0.3)
- if len(indy_soft[0])>1:
- fmin = indy_soft[0][-1]+fmax
- Fmin = Vb[fmin]
- dmin = droof[fmin]
- # Onset of softening
- # Find yielding displacement with equal energy principle n the interval from Dmax to Dmin (onset of plateu)
- Areas = np.array([(Vb[i+1]+Vb[i])/2 for i in range(fmax, fmin)])
- dd = np.array([droof[i+1]-droof[i] for i in range(fmax, fmin)])
- Edmin = np.sum(dd*Areas)
- ds = 2/(Fmax-Fmin)*(Edmin - (dmin-dmax)*Fmax + 0.5*dmin*(Fmax-Fmin))
- du = np.max(droof)
- if ds>du: ds = du
- # Residual Plateu
- if len(indy_soft[0])>0:
- Areas = np.array([(Vb[i+1]+Vb[i])/2 for i in range(fmin, len(Vb)-1)])
- dd = np.array([droof[i+1]-droof[i] for i in range(fmin, len(Vb)-1)])
- Edplat = np.sum(dd*Areas)
- Fres = Edplat/(droof[-1]-dmin)
- slp_soft = abs((Fmax-Fmin)/(ds-dmin))
- dmin = dmin+(Fmin-Fres)/slp_soft
- if dmin>du:
- dmin = du
- Fmin = Vb[-1]
- else:
- Fmin = Fres
- else:
- fmin = len(Vb)-1
- Fmin = Fmax
- dmin = droof[fmin]
- ds = dmin
- du = dmin
- else:
- dy = droof[1]
- ds = droof[2]
- dmin = droof[3]
- du = droof[4]
- Fy = Vb[1]
- Fmax = Vb [2]
- Fmin = Vb[3]
-
- return [dy, ds, dmin, du, Fy, Fmax, Fmin]
-
-def get_spectral_ratios(capacity_curves, input_spectrum):
-
- Tuni = capacity_curves['mean_period']
- T = capacity_curves['periods']
-
- with open(input_spectrum, 'rb') as f:
- data = f.read()
- l = data.rstrip()
- lines = l.split('\n')
- data = [lines[i].split('\t') for i in range(0, len(lines))]
- Ts = np.array([float(ele[0]) for ele in data])
- Sa = np.array([float(ele[1]) for ele in data])
- S_Tuni = np.interp(Tuni, Ts, Sa)
- S_T = np.array([np.interp(ele, Ts, Sa) for ele in T])
- Sa_ratios = S_Tuni/S_T
-
- return Sa_ratios
def calculate_fragility_statistics(gmrs, PDM, T, damping, IMT, damage_model,
method, no_samples, size_sample):
@@ -1945,184 +1471,3 @@ def read_frag_model(input_file):
fragility_model['logstdev'] = log_stdev
return fragility_model
-
-def read_deformed_shape(damage_model, capacity_curves, icc):
-
- if damage_model['deformed shape'] != 'none':
- ISDvec, Sdvec = [],[]
- with open(damage_model['deformed shape'],'rU') as f:
- csv_reader = csv.reader(f)
- for row in csv_reader:
- if row[0] == 'ISD':
- ISDvec.append(np.array(row[1:],dtype = float))
- else:
- Sdvec.append(np.array(row[1:],dtype = float))
-
- try:
- ISDvec = np.array(ISDvec[icc])
- Sdvec = np.array(Sdvec[icc])
- except IndexError:
- ISDvec = np.array(ISDvec[0])
- Sdvec = np.array(Sdvec[0])
-
- else:
- # This is a simple assumption on the relationship between roof displacement and drift
- assert(capacity_curves['heights'] != []), 'If you want to use inter-storey drift damage model you should provide building height in capacity curve input file'
- RDvec = np.array(capacity_curves['Sd'][icc])*capacity_curves['gamma'][icc]
- Sdvec = np.array(capacity_curves['Sd'][icc])
- ISDvec = np.array(RDvec)/capacity_curves['heights'][icc]
-
- return [ISDvec, Sdvec]
-
-def check_SDOF_curves(capacity_curves):
-
- no_capacity_curves = len(capacity_curves['Sd'])
- for icc in range(no_capacity_curves):
-
- assert(len(capacity_curves['Sd'][icc]) <= 5), "Idealise capacity curves with a maximum of 5 points, "+str(len(capacity_curves['Sd'][icc]))+" given"
-
- if len(capacity_curves['Sd'][icc]) < 5:
- no_points = 5-len(capacity_curves['Sd'][icc])
- Sd = capacity_curves['Sd'][icc][:-1]
- for i in range(no_points):
- Sd.append(capacity_curves['Sd'][icc][-2]+(capacity_curves['Sd'][icc][-1] - capacity_curves['Sd'][icc][-2])/(no_points+1)*(i+1))
- Sd.append(capacity_curves['Sd'][icc][-1])
- Sa = np.interp(Sd,capacity_curves['Sd'][icc],capacity_curves['Sa'][icc])
- capacity_curves['Sd'][icc] = Sd
- capacity_curves['Sa'][icc] = Sa.tolist()
-
- return capacity_curves
-
-def aacp_calc(hzd_curve_file, frag_curves_file):
-
- # read and save frag curves
- # this reads the frag curves output files as given by rmtk
- beta=[]
- theta=[]
- ds_name=[]
- with open(frag_curves_file,'r') as f:
- header=f.readline()
- header=header.split(',')
- header2=f.readline()
- for line in f.readlines():
- line = line.split(',')
- ds_name.append(line[0])
- beta.append(float(line[2]))
- theta.append(float(line[5]))
-
- #read and save hzd curves
- lat=[]
- lon=[]
- rtn_period=[]
- x_axis=[]
- y_axis=[]
- lambda_vec=[]
-
- with open(hzd_curve_file,'r') as f:
- header=f.readline()
- header=header.split(',')
- rtn_period.append(float(header[1][-4:]))
- scnd_line=f.readline()
- scnd_line=scnd_line.split(',')
- scnd_line=scnd_line[3:]
- x_axis=[float(i.split(':', 1)[0]) for i in scnd_line]
- log_x_axis=[math.log(float(i.split(':', 1)[0])) for i in scnd_line]
- for line in f.readlines():
- line = line.split(',')
- lon.append(float(line[0]))
- lat.append(float(line[1]))
- temp=line[3:]
- for irow in range(len(temp)):
- value=float(temp[irow])
- y_axis.append(float(value))
- if ((value)-1)==0:
- lambda_vec.append(-1*math.log(1-float(0.999999999))/float(rtn_period[0]))
- else:
- lambda_vec.append(-1*math.log(1-value)/float(rtn_period[0]))
-
- # interpolation of hazard curve
- no_hzd_curves=int(len(y_axis)/len(x_axis))
- hzd_lambda=[]
- log_hzd_lambda=[]
- counter=-1
- for ihzd in range(no_hzd_curves):
- temp=[]
- temp_scnd=[]
- for irow in range(len(x_axis)):
- counter=counter+1
- temp.append(lambda_vec[counter])
- temp_scnd.append(math.log(lambda_vec[counter]))
- hzd_lambda.append(temp)
- log_hzd_lambda.append(temp_scnd)
-
- no_pts=2001
- imls=[]
- log_imls=[]
- incr=(x_axis[-1]-x_axis[1])/no_pts
- for iimls in range(1,no_pts):
- imls.append(float(incr*iimls))
- log_imls.append(math.log(float(incr*iimls)))
-
- imls_array=np.array(imls)
- log_x_axis_array=np.array(log_x_axis)
- log_hzd_lambda_array=np.array(log_hzd_lambda)
-
- inter_hzd=[]
- exp_inter_hzd=[]
- for ihzd in range(no_hzd_curves):
- funcint=interp1d(log_x_axis_array,log_hzd_lambda_array[ihzd])
- inter_hzd.append(funcint(log_imls))
-
- for ihzd in range(no_hzd_curves):
- temp=[]
- for ipts in range(len(log_imls)):
- temp.append(math.exp(inter_hzd[ihzd][ipts]))
- exp_inter_hzd.append(temp)
-
- # calc lambdaC and aapc
- dim=np.diff(imls_array)
- dlim=np.diff(exp_inter_hzd)
-
- tgt_iml=[]
-
- for ipts in range(1,len(imls)):
- tgt_iml.append((float(imls[ipts-1])+float(imls[ipts]))/2)
-
- beta_col=beta[-1]
- theta_col=theta[-1]
-
- frag_pts=[]
- for ipts in range(len(tgt_iml)):
- frag_pts.append(norm.cdf(math.log(float(tgt_iml[ipts])/float(theta_col))/beta_col))
-
- lambda_c=[]
- aapc=[]
- counter=0
- for ihzd in range(no_hzd_curves):
- temp=[]
- for ipts in range(len(tgt_iml)):
- counter=counter+1
- temp.append(float(frag_pts[ipts])*abs(dlim[ihzd][ipts]/dim[ipts])*dim[ipts])
- lambda_c.append(sum(i for i in temp))
- aapc.append(-1*math.exp(-1*(lambda_c[ihzd]))+1)
-
- # plot aacp per location
- objects=[]
- for ihzd in range(no_hzd_curves):
-
- # coordinates lat;lon of all locations
- objects.append('['+str(lat[ihzd])+';'+str(lon[ihzd])+']')
-
- y_pos = np.arange(len(objects))
- values=np.array(aapc)
-
- plt.bar(y_pos, values, align='center', alpha=0.5)
- plt.xticks(y_pos, objects,rotation=90)
- plt.xlabel('Coordinates ([lat;lon)]')
- plt.ylabel('AAPC')
- plt.yscale('log')
- plt.title('Average annual probability of collapse')
-
- plt.show()
-
- return lat,lon,aapc
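
For reference, the removed aacp_calc combined a collapse fragility curve with a site hazard curve to obtain the average annual probability of collapse. A condensed sketch of that integral with synthetic inputs (the original read hazard and fragility curves from CSV files; every number below is an assumption):

```python
# Sketch: average annual probability of collapse (AAPC), i.e. the collapse
# fragility integrated against the slope of the hazard curve.
import numpy as np
from scipy.stats import norm

iml = np.linspace(0.05, 2.0, 500)        # intensity measure levels [g]
hazard = 0.002 * iml ** -2.2             # annual rate of exceedance (assumed)
theta, beta = 0.9, 0.5                   # collapse fragility median/dispersion (assumed)

p_collapse = norm.cdf(np.log(iml / theta) / beta)   # P(collapse | iml)
# lambda_c = sum of P(C | im) * |d(lambda)/d(im)| * d(im) over the IML grid
mid_p = 0.5 * (p_collapse[:-1] + p_collapse[1:])
lambda_c = np.sum(mid_p * np.abs(np.diff(hazard)))
aapc = 1.0 - np.exp(-lambda_c)           # from a Poisson occurrence model
print('lambda_c = %.3e /yr, AAPC = %.3e' % (lambda_c, aapc))
```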