Skip to content

Commit

Permalink
Merge pull request #262 from EGA-archive/develop
Browse files Browse the repository at this point in the history
Develop
  • Loading branch information
costero-e authored Jan 9, 2024
2 parents 58998f8 + d5aff22 commit 7b3630f
Show file tree
Hide file tree
Showing 8 changed files with 149 additions and 21 deletions.
1 change: 1 addition & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ To give the right permissions for AAI you will need to set the permissions of th
docker exec beacon-permissions bash permissions/permissions-ui/start.sh
```
Please bear in mind that the user name has to be the same as the one you used when creating the user in LS AAI or in the IdP, whichever AAI method you are working with.
To give a user a certain type of response for their queries, please modify this file [response_type.yml](https://github.com/EGA-archive/beacon2-ri-api/blob/master/beacon/request/response_type.yml), adding the maximum type of response you want to allow for each user.

Also, you will need to edit the file [conf.py](beacon/conf.py) and set the domain where your Keycloak is hosted in **ldp_user_info**, and the issuers you trust for your token in **trusted_issuers**. In case you want to run your local container, use this configuration:
```bash
Expand Down
12 changes: 6 additions & 6 deletions beacon/db/g_variants.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,18 +38,18 @@ def generate_position_filter_start(key: str, value: List[int]) -> List[Alphanume
if len(value) == 1:
filters.append(AlphanumericFilter(
id=VARIANTS_PROPERTY_MAP[key],
value=[value[0]],
value=value[0],
operator=Operator.GREATER_EQUAL
))
elif len(value) == 2:
filters.append(AlphanumericFilter(
id=VARIANTS_PROPERTY_MAP[key],
value=[value[0]],
value=value[0],
operator=Operator.GREATER_EQUAL
))
filters.append(AlphanumericFilter(
id=VARIANTS_PROPERTY_MAP[key],
value=[value[1]],
value=value[1],
operator=Operator.LESS_EQUAL
))
return filters
Expand All @@ -61,18 +61,18 @@ def generate_position_filter_end(key: str, value: List[int]) -> List[Alphanumeri
if len(value) == 1:
filters.append(AlphanumericFilter(
id=VARIANTS_PROPERTY_MAP[key],
value=[value[0]],
value=value[0],
operator=Operator.LESS_EQUAL
))
elif len(value) == 2:
filters.append(AlphanumericFilter(
id=VARIANTS_PROPERTY_MAP[key],
value=[value[0]],
value=value[0],
operator=Operator.GREATER_EQUAL
))
filters.append(AlphanumericFilter(
id=VARIANTS_PROPERTY_MAP[key],
value=[value[1]],
value=value[1],
operator=Operator.LESS_EQUAL
))
return filters
Expand Down
20 changes: 12 additions & 8 deletions beacon/response/build_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,7 @@ def build_response_summary(exists, qparams, num_total_results):


def build_response_summary_by_dataset(exists, num_total_results, response_dict):
LOG.debug(response_dict)
count=0
try:
for k,v in response_dict.items():
Expand Down Expand Up @@ -174,7 +175,7 @@ def build_beacon_resultset_response_by_dataset(data,
dataset_id = dataset_dict['dataset']
response_dict[dataset_id] = []
dataset_ids_list.append(dataset_id)

LOG.debug(include)
if include == 'MISS':
for doc in data:
for dataset_dict in list_of_dataset_dicts:
Expand Down Expand Up @@ -311,14 +312,16 @@ def build_beacon_resultset_response_by_dataset(data,
dataset_id = dataset_dict['dataset']
response_dict[dataset_id].append(doc)
else:
if doc['id'] in dataset_dict['ids']:
dataset_id = dataset_dict['dataset']
response_dict[dataset_id].append(doc)
elif doc['id'] in dataset_dict['ids']:
dataset_id = dataset_dict['dataset']
response_dict[dataset_id].append(doc)
#LOG.debug(doc['id'])
if doc['id'] in dataset_dict['ids']:
dataset_id = dataset_dict['dataset']
response_dict[dataset_id].append(doc)
elif doc['id'] in dataset_dict['ids']:
dataset_id = dataset_dict['dataset']
response_dict[dataset_id].append(doc)
except Exception as e:
pass
#LOG.debug(response_dict)

limit= qparams.query.pagination.limit
length_to_rest=0
Expand All @@ -338,10 +341,11 @@ def build_beacon_resultset_response_by_dataset(data,
length_to_rest = len(response_dict[dataset_id])
else:
start_record = start_record - len(response_dict[dataset_id])





LOG.debug(response_dict)
beacon_response = {
'meta': build_meta(qparams, entity_schema, Granularity.RECORD),
'responseSummary': build_response_summary_by_dataset(num_total_results > 0, num_total_results, response_dict),
Expand Down
7 changes: 6 additions & 1 deletion deploy/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -61,6 +61,11 @@ docker exec deploy_db_1 mongoimport --jsonArray --uri "mongodb://root:example@12

This loads the JSON files inside of the `data` folder into the MongoDB database container.

#### List the ids

After deploying all the data, you will need to tell the beacon which individual and biosample ids belong to each dataset and cohort. To do that, please add the name of each dataset together with the array of all its ids in this file [datasets.yml](https://github.com/EGA-archive/beacon2-ri-api/blob/master/beacon/request/datasets.yml).
Then, repeat the same for the cohorts modifying this file [cohorts.yml](https://github.com/EGA-archive/beacon2-ri-api/blob/master/beacon/request/cohorts.yml).

#### Create the indexes

You can create the necessary indexes running the following Python script:
Expand Down Expand Up @@ -188,7 +193,7 @@ But you can also use complex filters:
"query": {
"filters": [
{
"id": "UBERON:0001256",
"id": "UBERON:0000178",
"scope": "biosamples",
"includeDescendantTerms": false
}
Expand Down
2 changes: 1 addition & 1 deletion deploy/data/cohorts.json
Original file line number Diff line number Diff line change
@@ -1 +1 @@
[{"cohortType": "study-defined", "collectionEvents": [{"eventDiseases": {"availability": true, "availabilityCount": 1705, "distribution": {"diseases": {"acutebronchitis": 121, "agranulocytosis": 111, "asthma": 134, "bipolaraffectivedisorder": 134, "cardiomyopathy": 133, "dentalcaries": 139, "eatingdisorders": 134, "fibrosisandcirrhosisofliver": 132, "gastro-oesophagealrefluxdisease": 140, "haemorrhoids": 127, "influenzaduetocertainidentifiedinfluenzavirus": 135, "insulin-dependentdiabetesmellitus": 165, "irondeficiencyanaemia": 142, "multiplesclerosis": 125, "obesity": 136, "sarcoidosis": 136, "schizophrenia": 138, "thyroiditis": 141, "varicoseveinsoflowerextremities": 139}}}, "eventEthnicities": {"availability": true, "availabilityCount": 2287, "distribution": {"ethnicities": {"African": 119, "AnyotherAsianbackground": 120, "AnyotherBlackbackground": 104, "Anyothermixedbackground": 92, "Anyotherwhitebackground": 114, "AsianorAsianBritish": 125, "Bangladeshi": 96, "BlackorBlackBritish": 131, "British": 114, "Caribbean": 127, "Chinese": 100, "Indian": 110, "Irish": 111, "Mixed": 127, "Otherethnicgroup": 116, "Pakistani": 115, "White": 105, "WhiteandAsian": 114, "WhiteandBlackAfrican": 115, "WhiteandBlackCaribbean": 132}}}, "eventGenders": {"availability": true, "availabilityCount": 1597, "distribution": {"genders": {"female": 1271, "male": 1233}}}}], "id": "CINECA_synthetic_cohort_UK1", "inclusionCriteria": {"ageRange": {"end": {"iso8601duration": "P65Y"}, "start": {"iso8601duration": "P18Y"}}, "genders": [{"id": "NCIT:C16576", "label": "female"}, {"id": "NCIT:C20197", "label": "male"}], "locations": [{"id": "GAZ:00150372", "label": "UK"}]}, "name": "CINECA synthetic cohort UK1"}]
[{"cohortType": "study-defined", "collectionEvents": [{"eventDiseases": {"availability": true, "availabilityCount": 1705, "distribution": {"diseases": {"acutebronchitis": 121, "agranulocytosis": 111, "asthma": 134, "bipolaraffectivedisorder": 134, "cardiomyopathy": 133, "dentalcaries": 139, "eatingdisorders": 134, "fibrosisandcirrhosisofliver": 132, "gastro-oesophagealrefluxdisease": 140, "haemorrhoids": 127, "influenzaduetocertainidentifiedinfluenzavirus": 135, "insulin-dependentdiabetesmellitus": 165, "irondeficiencyanaemia": 142, "multiplesclerosis": 125, "obesity": 136, "sarcoidosis": 136, "schizophrenia": 138, "thyroiditis": 141, "varicoseveinsoflowerextremities": 139}}}, "eventEthnicities": {"availability": true, "availabilityCount": 2287, "distribution": {"ethnicities": {"African": 119, "AnyotherAsianbackground": 120, "AnyotherBlackbackground": 104, "Anyothermixedbackground": 92, "Anyotherwhitebackground": 114, "AsianorAsianBritish": 125, "Bangladeshi": 96, "BlackorBlackBritish": 131, "British": 114, "Caribbean": 127, "Chinese": 100, "Indian": 110, "Irish": 111, "Mixed": 127, "Otherethnicgroup": 116, "Pakistani": 115, "White": 105, "WhiteandAsian": 114, "WhiteandBlackAfrican": 115, "WhiteandBlackCaribbean": 132}}}, "eventGenders": {"availability": true, "availabilityCount": 1597, "distribution": {"genders": {"female": 1271, "male": 1233}}}, "eventLocations": {"availability": true, "availabilityCount": 1597, "distribution": {"locations": {"England": 322, "NorthernIreland": 317, "RepublicofIreland": 311, "Scotland": 308, "Wales": 339}}}}], "id": "CINECA_synthetic_cohort_UK1", "inclusionCriteria": {"ageRange": {"end": {"iso8601duration": "P65Y"}, "start": {"iso8601duration": "P18Y"}}, "genders": [{"id": "NCIT:C16576", "label": "female"}, {"id": "NCIT:C20197", "label": "male"}], "locations": [{"id": "GAZ:00150372", "label": "UK"}]}, "name": "CINECA synthetic cohort UK1"}]
115 changes: 115 additions & 0 deletions deploy/data/cohorts_av.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
[
{
"cohortType": "study-defined",
"collectionEvents": [
{
"eventDiseases": {
"availability": true,
"availabilityCount": 1700,
"distribution": {
"diseases": {
"acutebronchitis": 121,
"agranulocytosis": 111,
"asthma": 134,
"bipolaraffectivedisorder": 134,
"cardiomyopathy": 133,
"dentalcaries": 135,
"eatingdisorders": 134,
"fibrosisandcirrhosisofliver": 132,
"gastro-oesophagealrefluxdisease": 140,
"haemorrhoids": 127,
"influenzaduetocertainidentifiedinfluenzavirus": 135,
"insulin-dependentdiabetesmellitus": 165,
"irondeficiencyanaemia": 142,
"multiplesclerosis": 125,
"obesity": 136,
"sarcoidosis": 136,
"schizophrenia": 138,
"thyroiditis": 141,
"varicoseveinsoflowerextremities": 139
}
}
},
"eventEthnicities": {
"availability": true,
"availabilityCount": 54,
"distribution": {
"ethnicities": {
"African": 3,
"AnyotherAsianbackground": 4,
"AnyotherBlackbackground": 4,
"Anyothermixedbackground": 2,
"Anyotherwhitebackground": 2,
"AsianorAsianBritish": 4,
"Bangladeshi": 3,
"BlackorBlackBritish": 2,
"British": 4,
"Caribbean": 2,
"Chinese": 4,
"Indian": 2,
"Irish": 3,
"Mixed": 2,
"Otherethnicgroup": 4,
"Pakistani": 2,
"White": 2,
"WhiteandAsian": 3,
"WhiteandBlackAfrican": 3,
"WhiteandBlackCaribbean": 1
}
}
},
"eventGenders": {
"availability": true,
"availabilityCount": 3,
"distribution": {
"genders": {
"female": 2,
"male": 1
}
}
},
"eventLocations": {
"availability": true,
"availabilityCount": 1597,
"distribution": {
"locations": {
"England": 2,
"NorthernIreland": 2,
"RepublicofIreland": 3,
"Scotland": 4,
"Wales": 5
}
}
}
}
],
"id": "AV_synthetic_cohort",
"inclusionCriteria": {
"ageRange": {
"end": {
"iso8601duration": "P65Y"
},
"start": {
"iso8601duration": "P18Y"
}
},
"genders": [
{
"id": "NCIT:C16576",
"label": "female"
},
{
"id": "NCIT:C20197",
"label": "male"
}
],
"locations": [
{
"id": "GAZ:00150372",
"label": "UK"
}
]
},
"name": "AV synthetic cohort"
}
]
4 changes: 2 additions & 2 deletions deploy/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -179,8 +179,8 @@ services:
- 3000:3000
stdin_open: true
volumes:
- ./frontend/.env:/frontend/.env
- ./frontend/src/config.json:/frontend/src/config.json
- ../frontend/.env:/frontend/.env
- ../frontend/src/config.json:/frontend/src/config.json



9 changes: 6 additions & 3 deletions frontend/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,13 +1,16 @@
FROM node:19.0-alpine

# set working directory
WORKDIR /usr/src/app
WORKDIR ./frontend

# add _/usr/src/app/node_modules/.bin_ to $PATH
ENV PATH /usr/src/app/node_modules/.bin:$PATH
ENV PATH /frontend/node_modules/.bin:$PATH

# install app dependencies
COPY package.json .
COPY yarn.lock .
COPY public ./public
COPY src ./src

RUN yarn install --production

RUN yarn install --production

0 comments on commit 7b3630f

Please sign in to comment.